├── .gitattributes
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   ├── config.yml
│   │   └── feature_request.md
│   ├── dependabot.yml
│   ├── pull_request_template.md
│   └── workflows
│       ├── docs.yml
│       ├── lint-test.yml
│       └── publish.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .vscode
│   └── extensions.json
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── commitlint.config.js
├── docs
│   ├── CHANGELOG
│   │   └── CHANGELOG.md
│   ├── CONTRIBUTING
│   │   └── CONTRIBUTING.md
│   ├── Usage
│   │   ├── Advanced.md
│   │   ├── FastAPI-Helper.md
│   │   ├── Security.md
│   │   └── index.md
│   ├── images
│   │   └── rocket-24.svg
│   └── index.md
├── docs_src
│   └── advanced
│       ├── modify-request.py
│       ├── modify-response-particular.py
│       └── modify-response.py
├── mkdocs.yml
├── pyproject.toml
├── scripts
│   ├── gen_ref_pages.py
│   └── pre_commit_scripts
│       └── ver_sync.py
├── src
│   └── fastapi_proxy_lib
│       ├── __init__.py
│       ├── core
│       │   ├── __init__.py
│       │   ├── _model.py
│       │   ├── _tool.py
│       │   ├── http.py
│       │   ├── tool.py
│       │   └── websocket.py
│       ├── fastapi
│       │   ├── __init__.py
│       │   ├── app.py
│       │   └── router.py
│       └── py.typed
└── tests
    ├── __init__.py
    ├── app
    │   ├── echo_http_app.py
    │   ├── echo_ws_app.py
    │   └── tool.py
    ├── conftest.py
    ├── test_core_lib.py
    ├── test_docs_examples.py
    ├── test_http.py
    ├── test_ws.py
    └── tool.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report 🐛
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Describe the bug
11 |
12 | A clear and concise description of what the bug is.
13 |
14 | ## To Reproduce
15 |
16 | Steps to reproduce the behavior:
17 |
18 | 1. Go to '...'
19 | 2. Click on '....'
20 | 3. Scroll down to '....'
21 | 4. See error
22 |
23 | ## Expected behavior
24 |
25 | A clear and concise description of what you expected to happen.
26 |
27 | ## Configuration
28 |
29 | ```shell
30 | # use `pip freeze` to list all deps
31 | pip freeze
32 | ```
33 |
34 | - Python version:
35 | - OS version:
36 | - deps version:
37 | - ...
38 |
39 | ## Additional context
40 |
41 | Add any other context about the problem here.
42 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | contact_links:
2 | - name: I have a question 🤔
3 | url: https://github.com/WSH032/fastapi-proxy-lib/discussions
4 | about: If you have any question that's not clearly a bug, please open a discussion first.
5 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request 💡
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Have you discussed it?
11 |
12 | - [ ] I have discussed this feature request in the discussions, and the developers asked me to create an issue here.
13 |
14 | ## Describe your feature request
15 |
16 | A clear and concise description of what the feature is.
17 |
18 | ## Is your feature request related to a problem? Please describe
19 |
20 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
21 |
22 | ## Describe the solution you'd like
23 |
24 | A clear and concise description of what you want to happen.
25 |
26 | ## Describe alternatives you've considered
27 |
28 | A clear and concise description of any alternative solutions or features you've considered.
29 |
30 | ## Additional context
31 |
32 | Add any other context or screenshots about the feature request here.
33 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | # GitHub Actions
9 | - package-ecosystem: "github-actions"
10 | directory: "/"
11 | schedule:
12 | interval: "daily"
13 | # Python
14 | - package-ecosystem: "pip"
15 | directory: "/"
16 | schedule:
17 | interval: "daily"
18 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 |
3 |
4 | # Summary
5 |
6 |
7 |
8 | # Checklist
9 |
10 | - [ ] I've read `CONTRIBUTING.md`.
11 | - [ ] I understand that this PR may be closed in case there was no previous discussion. (This doesn't apply to typos!)
12 | - [ ] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
13 | - [ ] I've updated the documentation accordingly.
14 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | # copy from: https://github.com/frankie567/httpx-ws/blob/main/.github/workflows/docs.yml
2 |
3 | # WARNING: Do not change the name of this file, keep `docs.yml`.
4 | # markdown badges are hard-coded to point to this file.
5 |
6 | name: Deploy documentation
7 |
8 | # Since documentation updates may be frequent,
9 | # we do not run tests when deploying the documentation;
10 | # instead, tests run during the PR stage.
11 | on:
12 | push:
13 | branches:
14 | - main
15 | workflow_dispatch:
16 |
17 | # Allow one concurrent deployment
18 | concurrency:
19 | group: "pages"
20 | cancel-in-progress: true
21 |
22 | # Default to bash
23 | defaults:
24 | run:
25 | shell: bash
26 |
27 | jobs:
28 | build-docs:
29 | runs-on: ubuntu-latest
30 | steps:
31 | - uses: actions/checkout@v4
32 | # https://github.com/timvink/mkdocs-git-revision-date-localized-plugin#note-when-using-build-environments
33 | with:
34 | fetch-depth: 0
35 | - name: Set up Python
36 | uses: actions/setup-python@v5
37 | with:
38 | python-version: "3.10"
39 | - name: Install dependencies
40 | run: |
41 | python -m pip install --upgrade pip
42 | pip install hatch
43 | - name: Build
44 | run: hatch run docs:docs-build
45 | - name: Upload artifact
46 | uses: actions/upload-pages-artifact@v3
47 | with:
48 | path: ./site
49 |
50 | deploy-docs:
51 | needs: build-docs
52 | # Grant GITHUB_TOKEN the permissions required to make a Pages deployment
53 | permissions:
54 | pages: write # to deploy to Pages
55 | id-token: write # to verify the deployment originates from an appropriate source
56 | environment:
57 | name: github-pages
58 | url: ${{ steps.deployment.outputs.page_url }}
59 | runs-on: ubuntu-latest
60 | steps:
61 | - name: Deploy to GitHub Pages
62 | id: deployment
63 | uses: actions/deploy-pages@v4
64 |
--------------------------------------------------------------------------------
/.github/workflows/lint-test.yml:
--------------------------------------------------------------------------------
1 | # WARNING: Do not change the name of this file, keep `lint-test.yml`.
2 | # `workflow_call` needs the name of the workflow file to work.
3 |
4 | name: Lint check and test
5 |
6 | # We only run checks automatically for PRs.
7 | # It is best to avoid direct commits to the main branch; instead, make a PR for checks.
8 | # For pushes to the main branch, the checks are done by `publish.yml` when publishing.
9 | on:
10 | pull_request:
11 | workflow_dispatch:
12 | # NOTE: set `secrets: inherit` when call this workflow from other workflow.
13 | workflow_call:
14 |
15 | jobs:
16 | lint-check:
17 | runs-on: ubuntu-latest
18 | strategy:
19 | fail-fast: false
20 | matrix:
21 | python_version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
22 | steps:
23 | - uses: actions/checkout@v4
24 | - name: Set up Python
25 | uses: actions/setup-python@v5
26 | with:
27 | python-version: ${{ matrix.python_version }}
28 | - name: Install dependencies
29 | run: |
30 | python -m pip install --upgrade pip
31 | pip install hatch
32 | - name: Lint check and type check
33 | run: |
34 | hatch run lint-check
35 |
36 | test:
37 | runs-on: ${{ matrix.os }}
38 | strategy:
39 | fail-fast: false
40 | matrix:
41 | python_version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
42 | os: ["ubuntu-latest", "windows-latest"]
43 | steps:
44 | - uses: actions/checkout@v4
45 | - name: Set up Python
46 | uses: actions/setup-python@v5
47 | with:
48 | python-version: ${{ matrix.python_version }}
49 | - name: Install dependencies
50 | run: |
51 | python -m pip install --upgrade pip
52 | pip install hatch
53 | - name: Test
54 | # Prevent unknown errors from causing long-term blockage of CI during testing;
55 | # can be adjusted according to the actual test time
56 | timeout-minutes: 5
57 | run: |
58 | hatch run test
59 | - name: Upload coverage reports to Codecov with GitHub Action
60 | uses: codecov/codecov-action@v5
61 | with:
62 | token: ${{ secrets.CODECOV_TOKEN }}
63 | verbose: true
64 | fail_ci_if_error: true
65 |
66 | # https://github.com/marketplace/actions/alls-green#why
67 | lint-test-all-green: # This job does nothing and is only used for the branch protection
68 | if: always() # IMPORTANT: mandatory
69 | needs:
70 | - lint-check
71 | - test
72 | runs-on: ubuntu-latest
73 | steps:
74 | - name: Decide whether the needed jobs succeeded or failed
75 | uses: re-actors/alls-green@release/v1
76 | with:
77 | jobs: ${{ toJSON(needs) }}
78 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | # refer to: https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
2 |
3 | # WARNING: Do not change the name of this file, keep `publish.yml`.
4 | # "trusted publishing" will check the name of the workflow file.
5 |
6 | name: Publish Python 🐍 distribution 📦 to PyPI
7 |
8 | on:
9 | push:
10 | tags:
11 | - v*
12 |
13 | jobs:
14 | lint-test:
15 | name: Lint check and test 🧪
16 | uses: ./.github/workflows/lint-test.yml
17 | secrets: inherit # IMPORTANT: sub-workflow needs secrets for uploading codecov
18 |
19 | build-dist:
20 | needs:
21 | - lint-test
22 | name: Build distribution 📦
23 | runs-on: ubuntu-latest
24 | steps:
25 | - uses: actions/checkout@v4
26 | - name: Set up Python
27 | uses: actions/setup-python@v5
28 | with:
29 | python-version: "3.10"
30 | - name: Install dependencies
31 | run: |
32 | python -m pip install --upgrade pip
33 | pip install hatch
34 | - name: Build a binary wheel and a source tarball
35 | run: |
36 | hatch build
37 | - name: Store the distribution packages
38 | uses: actions/upload-artifact@v4
39 | with:
40 | name: python-package-distributions
41 | path: dist/
42 | if-no-files-found: error
43 |
44 | publish-to-pypi:
45 | needs:
46 | - build-dist
47 | name: Publish Python 🐍 distribution 📦 to PyPI
48 | runs-on: ubuntu-latest
49 | environment:
50 | name: pypi
51 | url: https://pypi.org/p/fastapi-proxy-lib
52 | permissions:
53 | id-token: write # IMPORTANT: mandatory for trusted publishing
54 | steps:
55 | - name: Download all the dists
56 | uses: actions/download-artifact@v4
57 | with:
58 | name: python-package-distributions
59 | path: dist/
60 | - name: Publish distribution 📦 to PyPI
61 | uses: pypa/gh-action-pypi-publish@release/v1
62 |
63 | github-release:
64 | needs:
65 | - publish-to-pypi
66 | name: Create GitHub release 🏷️
67 | runs-on: ubuntu-latest
68 | permissions:
69 | contents: write # IMPORTANT: mandatory for creating release
70 | steps:
71 | - name: Download all the dists
72 | uses: actions/download-artifact@v4
73 | with:
74 | name: python-package-distributions
75 | path: dist/
76 | - name: Create release
77 | uses: ncipollo/release-action@v1
78 | with:
79 | draft: true
80 | body: ${{ github.event.head_commit.message }}
81 | artifacts: dist/*.whl,dist/*.tar.gz
82 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
105 | __pypackages__/
106 |
107 | # Celery stuff
108 | celerybeat-schedule
109 | celerybeat.pid
110 |
111 | # SageMath parsed files
112 | *.sage.py
113 |
114 | # Environments
115 | .env
116 | .venv
117 | env/
118 | venv/
119 | ENV/
120 | env.bak/
121 | venv.bak/
122 |
123 | # Spyder project settings
124 | .spyderproject
125 | .spyproject
126 |
127 | # Rope project settings
128 | .ropeproject
129 |
130 | # mkdocs documentation
131 | /site
132 |
133 | # mypy
134 | .mypy_cache/
135 | .dmypy.json
136 | dmypy.json
137 |
138 | # Pyre type checker
139 | .pyre/
140 |
141 | # pytype static type analyzer
142 | .pytype/
143 |
144 | # Cython debug symbols
145 | cython_debug/
146 |
147 | # PyCharm
148 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
149 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
150 | # and can be added to the global gitignore or merged into this file. For a more nuclear
151 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
152 | #.idea/
153 |
154 | # only track the `extensions.json`
155 | /.vscode/*
156 | !/.vscode/extensions.json
157 |
158 | # ignore temporary files
159 | Untitled*.ipynb
160 | Untitled*.py
161 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # See https://pre-commit.com for more information
2 | # See https://pre-commit.com/hooks.html for more hooks
3 |
4 | # # set `default_language_version` need this version of Python existed on the computer
5 | # default_language_version:
6 | # python: python3.10
7 |
8 | default_install_hook_types: [pre-commit, commit-msg]
9 |
10 | repos:
11 | - repo: https://github.com/pre-commit/pre-commit-hooks
12 | rev: v5.0.0
13 | hooks:
14 | - id: no-commit-to-branch
15 | - id: check-added-large-files
16 | - id: check-toml
17 | - id: check-json
18 | - id: check-yaml
19 | args:
20 | - --unsafe
21 | - id: end-of-file-fixer
22 | - id: trailing-whitespace
23 | # ruff must before black
24 | - repo: https://github.com/astral-sh/ruff-pre-commit
25 | rev: v0.11.2
26 | hooks:
27 | - id: ruff
28 | alias: ruff # NOTE: don't change this alias, it's used in `ver_sync.py`, keep consistent with `pyproject.toml`
29 | args: [--fix, --exit-non-zero-on-fix]
30 | - repo: https://github.com/psf/black-pre-commit-mirror
31 | rev: 25.1.0
32 | hooks:
33 | - id: black
34 | alias: black # NOTE: don't change this alias, it's used in `ver_sync.py`, keep consistent with `pyproject.toml`
35 | - repo: https://github.com/codespell-project/codespell
36 | rev: v2.4.1
37 | hooks:
38 | - id: codespell
39 | alias: codespell # NOTE: don't change this alias, it's used in `ver_sync.py`, keep consistent with `pyproject.toml`
40 | additional_dependencies:
41 | - tomli
42 | - repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
43 | rev: v9.22.0
44 | hooks:
45 | - id: commitlint
46 | stages: [commit-msg]
47 | # NOTE: the dependencies must consistent with `commitlint.config.js`
48 | additional_dependencies: ["@commitlint/config-conventional"]
49 | - repo: local
50 | hooks:
51 | - id: ver_sync
52 | stages: [pre-commit]
53 | name: synchronize versions of lint tools
54 | entry: python scripts/pre_commit_scripts/ver_sync.py
55 | language: python
56 | additional_dependencies:
57 | - tomlkit == 0.12.* # TODO: Once it releases version 1.0.0, we will remove this restriction.
58 | - ruamel.yaml == 0.18.* # TODO: Once it releases version 1.0.0, we will remove this restriction.
59 | - packaging == 23.*
60 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "njpwerner.autodocstring",
4 | "ms-python.black-formatter",
5 | "ms-python.vscode-pylance",
6 | "ms-python.python",
7 | "charliermarsh.ruff"
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # Changelog
6 |
7 | All notable changes to this project will be documented in this file.
8 |
9 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
10 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)(**After we publish 1.0.0**).
11 |
12 | - `Added` for new features.
13 | - `Changed` for changes in existing functionality.
14 | - `Deprecated` for soon-to-be removed features.
15 | - `Removed` for now removed features.
16 | - `Fixed` for any bug fixes.
17 | - `Security` in case of vulnerabilities.
18 | - `[YANKED]` for deprecated releases.
19 | - `Internal` for internal changes. Only for maintainers.
20 |
21 |
22 |
23 |
24 | ## [Unreleased]
25 |
26 | ## [0.3.0] - 2025-03-25
27 |
28 | ### Fixed
29 |
30 | - [#57](https://github.com/WSH032/fastapi-proxy-lib/pull/57) - fix: error in handling multiple query parameters (#56). Thanks [@yhl-cs](https://github.com/yhl-cs)!
31 |
32 | Now `fastapi-proxy-lib` can handle multiple query parameters (e.g. `foo?a=1&a=2`) correctly. Previously, it would only keep the last one (e.g. `foo?a=2`).
33 |
34 | ## [0.2.0] - 2025-01-15
35 |
36 | ### Added
37 |
38 | - [#37](https://github.com/WSH032/fastapi-proxy-lib/pull/37) - docs: add example of `Modify (redefine) response only to particular endpoint`. Thanks [@pavelsr](https://github.com/pavelsr)!
39 |
40 | ### Changed
41 |
42 | - [#30](https://github.com/WSH032/fastapi-proxy-lib/pull/30) - fix(internal): use `websocket` in favor of `websocket_route`. Thanks [@WSH032](https://github.com/WSH032)!
43 |
44 | ### Removed
45 |
46 | - [#49](https://github.com/WSH032/fastapi-proxy-lib/pull/49) - Drop support for `Python 3.8`.
47 |
48 | ### Fixed
49 |
50 | - [#46](https://github.com/WSH032/fastapi-proxy-lib/pull/46) - fix: don't use module-level logging methods. Thanks [@dvarrazzo](https://github.com/dvarrazzo)
51 | - [#49](https://github.com/WSH032/fastapi-proxy-lib/pull/49) - fix!: bump `httpx-ws >= 0.7.1` to fix frankie567/httpx-ws#29. Thanks [@WSH032](https://github.com/WSH032)!
52 |
53 | ### Security
54 |
55 | - [#50](https://github.com/WSH032/fastapi-proxy-lib/pull/50) - fix(security): add `localhost` rule to `default_proxy_filter`. Thanks [@WSH032](https://github.com/WSH032)!
56 |
57 | ### Internal
58 |
59 | - [#47](https://github.com/WSH032/fastapi-proxy-lib/pull/47) - test: do not use deprecated and removed APIs of httpx. Thanks [@WSH032](https://github.com/WSH032)!
60 |
61 | ## [0.1.0] - 2023-12-01
62 |
63 | ### Security
64 |
65 | - [#10](https://github.com/WSH032/fastapi-proxy-lib/pull/10) - fix security vulnerabilities of cookies leakage between different users. Thanks [@WSH032](https://github.com/WSH032)!
66 |
67 | ### Removed
68 |
69 | - [#10](https://github.com/WSH032/fastapi-proxy-lib/pull/10) - Remove support for setting cookies at the `AsyncClient` level. Thanks [@WSH032](https://github.com/WSH032)!
70 |
71 | ## [0.0.1b0] - 2023-11-27 [YANKED]
72 |
73 | !!! danger
74 | **This version has security vulnerabilities, please stop using it.**
75 |
76 | [unreleased]: https://github.com/WSH032/fastapi-proxy-lib/tree/HEAD
77 | [0.3.0]: https://github.com/WSH032/fastapi-proxy-lib/releases/tag/v0.3.0
78 | [0.2.0]: https://github.com/WSH032/fastapi-proxy-lib/releases/tag/v0.2.0
79 | [0.1.0]: https://github.com/WSH032/fastapi-proxy-lib/releases/tag/v0.1.0
80 | [0.0.1b0]: https://github.com/WSH032/fastapi-proxy-lib/releases/tag/v0.0.1b0
81 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # Contributing
6 |
7 | > The guide is modified from [mkdocstrings](https://mkdocstrings.github.io/contributing/#contributing)
8 |
9 | Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given.
10 |
11 | ## Environment setup
12 |
13 | First, `fork` and `clone` the repository, then `cd` to the directory.
14 |
15 | We use [`hatch`](https://github.com/pypa/hatch) and [`pre-commit`](https://pre-commit.com/) to manage our project.
16 |
17 | You can install them with:
18 |
19 | ```shell
20 | # https://pypa.github.io/pipx/
21 | python3 -m pip install --user pipx
22 |
23 | pipx install hatch
24 | pipx install pre-commit
25 | ```
26 |
27 | Then, initialize the env with:
28 |
29 | ```shell
30 | # Init pre-commit
31 | # https://pre-commit.com/#3-install-the-git-hook-scripts
32 | pre-commit install
33 | pre-commit run --all-files
34 |
35 | # https://hatch.pypa.io/latest/environment/
36 | hatch shell
37 | ```
38 |
39 | That's all! Now, you can start to develop.
40 |
41 | ## Code style
42 |
43 | The source code is in `src/`
44 |
45 | We use [Ruff](https://github.com/astral-sh/ruff), [Black](https://github.com/psf/black), [Pyright](https://github.com/Microsoft/pyright/)
46 | and [Codespell](https://github.com/codespell-project/codespell) to check our code style and lint.
47 |
48 | Please check `pyproject.toml` to know our style.
49 |
50 | If you want to format or lint-fix your code, you can use the following command:
51 |
52 | ```shell
53 | hatch run lint
54 | ```
55 |
56 | or, with `pre-commit`:
57 |
58 | ```shell
59 | pre-commit run -a
60 | ```
61 |
62 | or, dry run:
63 |
64 | ```shell
65 | hatch run lint-check
66 | ```
67 |
68 | !!! tip
69 | If you use `VSCode`, we strongly recommend installing the extensions listed in `.vscode/extensions.json`.
70 | Our code style rules are quite strict,
71 | and these extensions help you see what needs to be fixed while coding.
72 |
73 | ## Testing
74 |
75 | We use [pytest](https://docs.pytest.org/en/stable/) to test our code.
76 |
77 | The test source code is in `tests/`
78 |
79 | You can run the testing with:
80 |
81 | ```shell
82 | hatch run test
83 | ```
84 |
85 | ## Documentation
86 |
87 | We use [mkdocs](https://www.mkdocs.org), [mkdocs-material](https://squidfunk.github.io/mkdocs-material), [mkdocstrings](https://mkdocstrings.github.io) to build our documentation.
88 |
89 | The documentation source code is in `docs/` and `mkdocs.yml`;
90 | there may also be some source code in `scripts/` or elsewhere (check `mkdocs.yml` to find it).
91 |
92 | Live-reloading docs:
93 |
94 | ```shell
95 | hatch run docs:mkdocs serve
96 | ```
97 |
98 | Build docs:
99 |
100 | ```shell
101 | hatch run docs:docs-build
102 | ```
103 |
104 | ## PR
105 |
106 | - PRs should target the `main` branch.
107 | - Keep branches up to date by `rebase` before merging.
108 | - Do not add multiple unrelated things in the same PR.
109 | - Do not submit PRs where you just take existing lines and reformat them without changing what they do.
110 | - Do not change other parts of the code that are not yours for formatting reasons.
111 | - Do not use your clone's main branch to make a PR - create a branch and PR that.
112 |
113 | ### Edit `CHANGELOG.md`
114 |
115 | If you have made the corresponding changes, please record them in `CHANGELOG.md`.
116 |
117 | ### Commit message convention
118 |
119 | Commit messages must follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/),
120 | or `pre-commit` may reject your commit.
121 |
122 | !!! info
123 | If you don't know how to do all of this, that's okay; feel free to open a PR anyway, and I will help you continue.
124 |
125 | ## More
126 |
127 | There may be more useful commands in `pyproject.toml`; refer to [hatch/environment/scripts](https://hatch.pypa.io/latest/config/environment/overview/#scripts) to learn how to use them.
128 |
129 | !!! info
130 | If you find that the commands given in the above examples are incorrect, please open an issue; we would greatly appreciate it.
131 |
132 | ---
133 |
134 | ## 😢
135 |
136 | !!! warning
137 | The following 👇 content is for the maintainers of this project; you probably don't need to read it.
138 |
139 | ---
140 |
141 | ## deploy-docs
142 |
143 | please refer to `.github/workflows/docs.yml`
144 |
145 | ## CI: lint-test
146 |
147 | please refer to `.github/workflows/lint-test.yml`
148 |
149 | ## CI: pre-commit-ci auto-update
150 |
151 | Every Monday, `pre-commit-ci` will send a PR for automatic hook version updates, which will trigger a local `ver_sync` hook.
152 |
153 | The `ver_sync` hook keeps the lint tool versions consistent between `.pre-commit-config.yaml` and `pyproject.toml`.
154 |
155 | Please check whether `ver_sync` works properly before accepting the PR.
156 |
157 | ## Publish and Release 🚀
158 |
159 | **^^First, check out a new branch and edit `CHANGELOG.md` to record the changes.^^**
160 |
161 | Then, please refer to:
162 |
163 | - `.github/workflows/publish.yml`
164 | -
165 | -
166 |
167 | Update the version in the **^^new branch^^** with:
168 |
169 | ```shell
170 | git add .
171 | hatch version {new_version}
172 | ```
173 |
174 | It will create a commit and tag automatically.
175 |
176 | Then, push the **new branch** with the **tag** to GitHub, and create a PR to the `main` branch.
177 |
178 | !!! warning
179 | The `bump version` PR must have **only one commit with the corresponding tag**; otherwise, it will be rejected.
180 |
181 | Review the PR; if it's OK, **rebase** it onto the `main` branch **^^locally^^**
182 |
183 | !!! warning
184 | **DO NOT rebase with tag in GitHub**, refer to
185 |
186 | Check if everything is ok, for example:
187 |
188 | - **check if the tag is on the `main` branch**.
189 | - check if the link in `CHANGELOG.md` is correct.
190 |
191 | If so, `approve` the `pypi` environment for the workflow.
192 |
193 | After that, the `publish.yml` workflow will build and publish the package to PyPI.
194 |
195 | Finally, edit the `draft release` created by the `publish.yml` workflow, and publish the release.
196 |
197 | !!! warning
198 | Creating the tag requires signature verification,
199 | please refer to
200 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # FastAPI Proxy Lib
6 |
7 |
8 | HTTP/WebSocket proxy for starlette/FastAPI
9 |
10 |
11 | | | |
12 | | - | - |
13 | | CI/CD | [![CI: lint-test]][CI: lint-test#link] [![pre-commit.ci status]][pre-commit.ci status#link] [![CI: docs]][CI: docs#link] [![CI: publish]][CI: publish#link] |
14 | | Code | [![codecov]][codecov#link] [![Code style: black]][Code style: black#link] [![Ruff]][Ruff#link] [![Checked with pyright]][Checked with pyright#link] |
15 | | Package | [![PyPI - Version]][PyPI#link] [![PyPI - Downloads]][PyPI#link] [![PyPI - Python Version]][PyPI#link] |
16 | | Meta | [![Hatch project]][Hatch project#link] [![GitHub License]][GitHub License#link] |
17 |
18 | ---
19 |
20 | Documentation:
21 |
22 | Source Code:
23 |
24 | ---
25 |
26 | ## Features
27 |
28 | - [X] **Out of the box!** [Helper functions](#quick-start) to conveniently get a FastAPI `app`/`router` for the proxy.
29 | - [x] **Only `Starlette` is required** for it to work ([`FastAPI` is optional](#installation)).
30 | - [x] Support both **HTTP** and **WebSocket** proxy.
31 | - [x] Supports all HTTP methods (`GET`, `POST`, etc.)
32 | - [x] Support both **reverse** proxy and **forward** proxy.
33 | - [x] **Transparently** and **losslessly** handle all proxy requests,
34 | including **HTTP headers**, **cookies**, **query parameters**, **body**, etc.
35 | - [x] Asynchronous streaming transfer, supporting **file proxy**.
36 | - [x] `fastapi-proxy-lib` values [privacy and security](https://wsh032.github.io/fastapi-proxy-lib/Usage/Security/).
37 |
38 | ### Other features
39 |
40 | - [x] Strict linting and strict-mode Pyright type checking.
41 | - [x] **100%** [Type Completeness](https://microsoft.github.io/pyright/#/typed-libraries?id=type-completeness), [Code coverage of **over 95%**][codecov#link].
42 | - [x] Forced keep-alive connections, minimizing proxy latency.
43 | - [x] Handle errors as gracefully as possible.
44 | - `fastapi-proxy-lib` has a well-designed mechanism behind the scenes to handle exceptional websocket disconnections for you.
45 | - `fastapi-proxy-lib` will exit as gracefully as possible.
46 |
47 | ### `FastAPI Proxy Lib` stands on the shoulders of giants
48 |
49 | - [httpx](https://github.com/encode/httpx) for HTTP proxy
50 | - [httpx-ws](https://github.com/frankie567/httpx-ws) for WebSocket proxy
51 |
52 | So, it perfectly supports all features of [httpx.AsyncClient](https://www.python-httpx.org/advanced/#client-instances); you can even use your own custom `AsyncClient` or [`Transport`](https://www.python-httpx.org/advanced/#custom-transports).
53 |
54 | See [Advanced Usage](https://wsh032.github.io/fastapi-proxy-lib/Usage/Advanced/) for more details.
55 |
56 | > except for the `cookies` and `base_url` arguments, which are taken over by `fastapi-proxy-lib`
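
For example, a minimal sketch of passing your own `AsyncClient` (the `client=` keyword appears in the documentation's helper examples; the transport settings here are only illustrative):

```python
import httpx
from fastapi_proxy_lib.fastapi.app import reverse_http_app

# any `httpx.AsyncClient` configuration works, e.g. a transport with retries
client = httpx.AsyncClient(transport=httpx.AsyncHTTPTransport(retries=2))

app = reverse_http_app(client=client, base_url="http://www.example.com/foo/")
```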
57 |
58 | ## Installation
59 |
60 | > !!! note
61 | >
62 | > We follow semantic versioning.
63 | > This is a young project, and before 1.0.0, there may be changes in the API (we try to avoid that).
64 | > We will release security updates at any time, please try to keep the version updated, instead of pinning to the old version.
65 | > pin to `major version` is recommended.
66 | > visit our `CHANGELOG.md` for more info.
67 |
68 | ```shell
69 | pip install fastapi-proxy-lib[standard]
70 | ```
71 |
72 | Perhaps you've noticed that we're installing `fastapi-proxy-lib[standard]` instead of `fastapi-proxy-lib`. The difference is:
73 |
74 | - The former will install `FastAPI` at the same time.
75 | - The latter installs only the basic dependencies for `fastapi-proxy-lib`.
76 |
77 | If you **only need to use this library with Starlette**, you only need to install the latter.
78 |
79 | ## Quick start
80 |
81 | With the helper functions, getting a FastAPI proxy server app is convenient and works out of the box:
82 |
83 | ```python
84 | from fastapi_proxy_lib.fastapi.app import reverse_http_app
85 |
86 | app = reverse_http_app(base_url="http://www.example.com/foo/")
87 | ```
88 |
89 | That's all! Now, you can launch the proxy server with `uvicorn`:
90 |
91 | ```shell
92 | uvicorn main:app --host 127.0.0.1 --port 8000
93 | ```
94 |
95 | Then, visit `http://127.0.0.1:8000/bar?baz=1`, and you will get the response from `http://www.example.com/foo/bar?baz=1`.
96 |
97 | **[To continue, please visit our documentation 📚 for more details](https://wsh032.github.io/fastapi-proxy-lib/)**:
98 |
99 | - support for `FastAPI router`
100 | - only `Starlette` dependency
101 | - `WebSocket` proxy
102 |
103 | ## development
104 |
105 | - If you find any issues, please don't hesitate to [open an issue](https://github.com/WSH032/fastapi-proxy-lib/issues).
106 | - If you need assistance, feel free to [start a discussion](https://github.com/WSH032/fastapi-proxy-lib/discussions).
107 | - Follow our `CONTRIBUTING.md`; [PRs welcome!](https://github.com/WSH032/fastapi-proxy-lib/pulls)
108 | - Security 😰❗: We take any security vulnerability seriously, [please report it to us privately](https://github.com/WSH032/fastapi-proxy-lib/security); it is greatly appreciated.
109 |
110 | English is not the native language of the author (me), so if you find any areas for improvement in the documentation, your feedback is welcome.
111 |
112 | If you find this project helpful, consider giving it a star, which makes me happy. :smile:
113 |
114 | ## References
115 |
116 | [Can fastapi proxy another site as a response to the request?](https://github.com/tiangolo/fastapi/discussions/7382)
117 |
118 | ## License
119 |
120 | This project is licensed under the terms of the *Apache License 2.0*.
121 |
122 |
123 |
124 |
125 | [CI: lint-test]: https://github.com/WSH032/fastapi-proxy-lib/actions/workflows/lint-test.yml/badge.svg
126 | [CI: lint-test#link]: https://github.com/WSH032/fastapi-proxy-lib/actions/workflows/lint-test.yml
127 | [CI: docs]: https://github.com/WSH032/fastapi-proxy-lib/actions/workflows/docs.yml/badge.svg
128 | [CI: docs#link]: https://github.com/WSH032/fastapi-proxy-lib/actions/workflows/docs.yml
129 | [CI: publish]: https://github.com/WSH032/fastapi-proxy-lib/actions/workflows/publish.yml/badge.svg
130 | [CI: publish#link]: https://github.com/WSH032/fastapi-proxy-lib/actions/workflows/publish.yml
131 | [pre-commit.ci status]: https://results.pre-commit.ci/badge/github/WSH032/fastapi-proxy-lib/main.svg
132 | [pre-commit.ci status#link]: https://results.pre-commit.ci/latest/github/WSH032/fastapi-proxy-lib/main
133 |
134 | [Code style: black]: https://img.shields.io/badge/code%20style-black-000000.svg
135 | [Code style: black#link]: https://github.com/psf/black
136 | [GitHub License]: https://img.shields.io/github/license/WSH032/fastapi-proxy-lib?color=9400d3
137 | [GitHub License#link]: https://github.com/WSH032/fastapi-proxy-lib/blob/main/LICENSE
138 | [Ruff]: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
139 | [Ruff#link]: https://github.com/astral-sh/ruff
140 | [Checked with pyright]: https://microsoft.github.io/pyright/img/pyright_badge.svg
141 | [Checked with pyright#link]: https://microsoft.github.io/pyright
142 |
143 | [PyPI - Version]: https://img.shields.io/pypi/v/fastapi-proxy-lib?logo=pypi&label=PyPI&logoColor=gold
144 | [PyPI - Downloads]: https://img.shields.io/pypi/dm/fastapi-proxy-lib?color=blue&label=Downloads&logo=pypi&logoColor=gold
145 | [PyPI - Python Version]: https://img.shields.io/pypi/pyversions/fastapi-proxy-lib?logo=python&label=Python&logoColor=gold
146 | [PyPI#link]: https://pypi.org/project/fastapi-proxy-lib
147 |
148 | [Hatch project]: https://img.shields.io/badge/%F0%9F%A5%9A-Hatch-4051b5.svg
149 | [Hatch project#link]: https://github.com/pypa/hatch
150 | [codecov]: https://codecov.io/gh/WSH032/fastapi-proxy-lib/graph/badge.svg?token=62QQU06E8X
151 | [codecov#link]: https://codecov.io/gh/WSH032/fastapi-proxy-lib
152 |
--------------------------------------------------------------------------------
/commitlint.config.js:
--------------------------------------------------------------------------------
1 | // refer to: https://commitlint.js.org/#/reference-configuration?id=shareable-configuration
2 | // Rule: https://karma-runner.github.io/6.4/dev/git-commit-msg.html
3 | // Rule: https://github.com/conventional-changelog/commitlint/tree/master/@commitlint/config-conventional
4 |
5 | // NOTE: the extends must consistent with `.pre-commit-config.yaml`
6 | module.exports = { extends: ['@commitlint/config-conventional'] };
7 |
--------------------------------------------------------------------------------
/docs/CHANGELOG/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | --8<-- "CHANGELOG.md"
2 |
--------------------------------------------------------------------------------
/docs/CONTRIBUTING/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | --8<-- "CONTRIBUTING.md"
2 |
--------------------------------------------------------------------------------
/docs/Usage/Advanced.md:
--------------------------------------------------------------------------------
1 | # Advanced
2 |
3 | For the following scenarios, you might prefer [fastapi_proxy_lib.core][]:
4 |
5 | - When you need to use proxies with **only** `Starlette` dependencies (without `FastAPI`).
6 | - When you need more fine-grained control over parameters and lifespan events.
7 | - When you need to further process the input and output before and after the proxy (similar to middleware).
8 |
9 | We will demonstrate with `FastAPI`,
10 | but you can completely switch to the `Starlette` approach,
11 | which is officially supported by this project.
12 |
13 | ## Starlette Support
14 |
15 | **^^[Please visit the `ReverseHttpProxy#examples` to view the demo with annotations :material-file-document: ][fastapi_proxy_lib.core.http.ReverseHttpProxy--examples]^^**.
16 |
17 | Also (without annotations):
18 |
19 | - [`ForwardHttpProxy#examples`][fastapi_proxy_lib.core.http.ForwardHttpProxy--examples]
20 | - [`ReverseWebSocketProxy#examples`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy--examples]
21 |
22 | ## Modify request
23 |
24 | In some cases, you may want to make final modifications before sending a request, such as performing behind-the-scenes authentication by modifying the request headers.
25 |
26 | `httpx` provides comprehensive authentication support, and `fastapi-proxy-lib` offers first-class support for `httpx`.
27 |
28 | See
29 |
30 | You can refer to the following example to implement simple authentication:
31 |
32 | ```python
33 | --8<-- "docs_src/advanced/modify-request.py"
34 | ```
35 |
36 | Visit `/headers` to see the result, which contains the `"X-Authentication": "bearer_token"` header.
37 |
38 | ## Modify response
39 |
40 | In some cases, you may want to make final modifications before returning the response to the client, such as transcoding video response streams.
41 |
42 | See [issue#15](https://github.com/WSH032/fastapi-proxy-lib/issues/15)
43 |
44 | You can refer to the following example to modify the response:
45 |
46 | ```python
47 | --8<-- "docs_src/advanced/modify-response.py"
48 | ```
49 |
50 | Visit `/`, and you will notice that the response body is printed to the console.
51 |
52 | ## Modify (redefine) response only to particular endpoint
53 |
54 | ```python
55 | --8<-- "docs_src/advanced/modify-response-particular.py"
56 | ```
57 |
58 | In this example, all requests except `GET /ip` will be passed to `httpbin.org`:
59 |
60 | ```bash
61 | # we assume your proxy server is running on `http://127.0.0.1:8000`
62 |
63 | # from `httpbin.org`, which is proxied
64 | curl http://127.0.0.1:8000/user-agent # { "user-agent": "curl/7.81.0" }
65 | # from your fastapi app
66 | curl http://127.0.0.1:8000/ip # { "msg":"Method is redefined" }
67 | ```
68 |
--------------------------------------------------------------------------------
/docs/Usage/FastAPI-Helper.md:
--------------------------------------------------------------------------------
1 | # FastAPI Helper
2 |
3 | !!! note
4 | The FastAPI helper modules need `FastAPI` installed.
5 |
6 | There are two helper modules to get FastAPI `app`/`router` for proxy conveniently.
7 |
8 | - [fastapi_proxy_lib.fastapi.app][]: High-level
9 | - [fastapi_proxy_lib.fastapi.router][]: Low-level
10 |
11 | ## app
12 |
13 | Using `fastapi_proxy_lib.fastapi.app` is convenient and works out of the box; there are three helper functions:
14 |
15 | - [forward_http_app][fastapi_proxy_lib.fastapi.app.forward_http_app]
16 | - [reverse_http_app][fastapi_proxy_lib.fastapi.app.reverse_http_app]
17 | - [reverse_ws_app][fastapi_proxy_lib.fastapi.app.reverse_ws_app]
18 |
19 | Example:
20 |
21 | ```python
22 | from fastapi_proxy_lib.fastapi.app import reverse_http_app
23 | from httpx import AsyncClient
24 |
25 | client = AsyncClient() # (1)!
26 | base_url = "http://www.example.com/" # (2)!
27 |
28 | app = reverse_http_app(client=client, base_url=base_url)
29 | ```
30 |
31 | 1. You can pass an `httpx.AsyncClient` instance:
32 | - if you want to customize the arguments, e.g. `httpx.AsyncClient(proxies={})`
33 | - if you want to reuse the connection pool of `httpx.AsyncClient`
34 | ---
35 | Or you can pass `None` (the default value), and `fastapi-proxy-lib` will create a new `httpx.AsyncClient` instance for you.
36 | 2. !!! note
37 | The `base_url` must end with `/`!
38 |
39 | For other app helpers, please refer to their API references.
40 |
41 | ## router
42 |
43 | For the following scenarios, you might prefer [fastapi_proxy_lib.fastapi.router][]:
44 |
45 | - When you need to adjust the `app`/`router` parameters.
46 | - When you need to [mount the proxy on a route of larger app](https://fastapi.tiangolo.com/tutorial/bigger-applications/).
47 |
48 | **^^[Please refer to the documentation of `RouterHelper` for more information :material-file-document: ][fastapi_proxy_lib.fastapi.router.RouterHelper--examples]^^**.
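
As a rough idea of what mounting a proxy onto a larger app can look like, here is a minimal sketch; the names `RouterHelper.register_router` and `RouterHelper.get_lifespan` below are assumptions for illustration only, and the linked `RouterHelper` documentation is authoritative:

```python
from fastapi import FastAPI
from fastapi_proxy_lib.core.http import ReverseHttpProxy
from fastapi_proxy_lib.fastapi.router import RouterHelper

helper = RouterHelper()
# assumed: wrap a core proxy instance into an `APIRouter`
proxy_router = helper.register_router(
    ReverseHttpProxy(base_url="http://www.example.com/")
)

# assumed: the helper's lifespan closes the underlying clients on shutdown
app = FastAPI(lifespan=helper.get_lifespan())
app.include_router(proxy_router, prefix="/proxy")
```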
49 |
--------------------------------------------------------------------------------
/docs/Usage/Security.md:
--------------------------------------------------------------------------------
1 | # Security
2 |
3 | ## proxy requests filter in forward proxy
4 |
5 | A forward proxy allows access to any URL given in the request, which can be **scary** 😫 if not restricted.
6 |
7 | For example, through `http://www.my-proxy-server.com/http://127.0.0.1:8000`,
8 | an attacker can access the server's local network.
9 |
10 | So, there is a `proxy_filter` argument in [`ForwardHttpProxy`][fastapi_proxy_lib.core.http.ForwardHttpProxy.__init__] to filter requests.
11 |
12 | If you do not explicitly specify it, `ForwardHttpProxy` will issue a warning and use the [default_proxy_filter][fastapi_proxy_lib.core.tool.default_proxy_filter].
13 |
14 | - If you want to accept all proxy requests (**never do this on a public server**), you can do it like this:
15 |
16 | ```python
17 | proxy_filter = lambda *_: None
18 | ```
19 |
20 | - If you want to implement your own proxy filter, please refer to the [fastapi_proxy_lib.core.tool.ProxyFilterProto][] (a minimal sketch follows below).
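
Below is a minimal sketch of such a filter. It assumes, in line with the `lambda *_: None` example above, that a proxy filter is a callable receiving the target `httpx.URL` and returning `None` to allow the request, or a string describing why it was rejected; check [fastapi_proxy_lib.core.tool.ProxyFilterProto][] for the exact protocol.

```python
from typing import Optional

import httpx

ALLOWED_HOSTS = {"www.example.com", "api.example.com"}  # hypothetical allow-list


def my_proxy_filter(url: httpx.URL) -> Optional[str]:
    """Allow only hosts on the allow-list and reject everything else."""
    if url.host in ALLOWED_HOSTS:
        return None  # `None` means the request is allowed
    return f"host {url.host!r} is not allowed"  # a string means the request is rejected
```

You would then pass it to the proxy, e.g. `ForwardHttpProxy(..., proxy_filter=my_proxy_filter)`.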
21 |
22 | ## `http`/`ws` vs `https`/`wss`
23 |
24 | !!! danger
25 | **Never use a server with the HTTPS protocol to proxy a target server (`base_url`) with the HTTP protocol !**
26 |
27 | e.g. `https://www.my-proxy-server.com/http://www.example.com/`
28 |
29 | There is a security issue:
30 |
31 | Browsers may send sensitive HTTPS information to your HTTPS proxy server;
32 | then, because of the transparency feature, `fastapi-proxy-lib` will forward
33 | this information to the target server over the HTTP protocol without modification,
34 | which may cause privacy leaks.
35 |
36 | !!! failure
37 | If you reverse it and use an HTTP server to proxy an HTTPS target server,
38 | there is a high probability that the request will fail.
40 |
41 | ## The same-origin policy of `ForwardHttpProxy`
42 |
43 | The `ForwardHttpProxy` server uses the same origin to proxy different target servers, e.g.:
44 |
45 | > http://www.my-proxy-server.com/http://www.example.com/
46 | > http://www.my-proxy-server.com/http://www.google.com/
47 | >
48 | > the origin of both is `http://www.my-proxy-server.com/`
49 |
50 | In this situation, the browser's same-origin protection policy is defeated,
51 | and cookies from `http://www.example.com/` will be sent to `http://www.google.com/`.
52 |
53 | You should inform the user of this situation and let them decide whether to continue.
54 |
55 | ---
56 |
57 | ## What did `fastapi-proxy-lib` do to protect your Security? 🔐
58 |
59 | !!! info
60 |     The following describes the security measures taken by `fastapi-proxy-lib` behind the scenes.
61 |     You may not need to read this in order to use the library.
62 |
63 | ### Forbid the merging of cookies at the AsyncClient level
64 |
65 | To fix security vulnerabilities of cookie leakage between different users:
66 | 
67 | - Before sending each proxy request, `fastapi-proxy-lib` will clear `AsyncClient.cookies` to avoid recording cookies from different users.
68 | - To prevent `AsyncClient` from merging cookies, `fastapi-proxy-lib` will forcibly add an empty cookie string `""` to each proxy request that does not contain a `Cookie` header field.
69 | 
70 | Through these measures, `fastapi-proxy-lib` hopes to prevent the merging and sharing of cookies from different users.
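71 | 
72 | Roughly, the two measures above amount to the following (a simplified sketch of the behaviour, not the actual implementation):
73 | 
74 | ```python
75 | import httpx
76 | from starlette.datastructures import MutableHeaders
77 | 
78 | client = httpx.AsyncClient()
79 | request_headers = MutableHeaders({"host": "www.example.com"})
80 | 
81 | # before each proxied request (simplified):
82 | client.cookies.clear()  # drop any cookies the shared AsyncClient may have recorded
83 | if "Cookie" not in request_headers:  # starlette headers are case-insensitive
84 |     request_headers["Cookie"] = ""  # empty cookie, so httpx will not inject its own cookiejar
85 | ```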
71 |
72 | For more info, please visit [Security Advisories `GHSA-7vwr-g6pm-9hc8`](https://github.com/WSH032/fastapi-proxy-lib/security/advisories/GHSA-7vwr-g6pm-9hc8) and [#10](https://github.com/WSH032/fastapi-proxy-lib/pull/10).
73 |
74 | !!! note
75 | It will **not affect** the normal sending and receiving of cookies.
76 |
--------------------------------------------------------------------------------
/docs/Usage/index.md:
--------------------------------------------------------------------------------
1 | # Introduction
2 |
3 | We provide two types of proxies:
4 |
5 | - reverse proxy:
6 | - [ReverseHttpProxy][fastapi_proxy_lib.core.http.ReverseHttpProxy]
7 | - [ReverseWebSocketProxy][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy]
8 | - forward proxy:
9 | - [ForwardHttpProxy][fastapi_proxy_lib.core.http.ForwardHttpProxy]
10 |
11 | ## What is a reverse proxy?
12 |
13 | A reverse proxy is similar to a gateway.
14 |
15 | All reverse proxies have a `base_url` parameter; the proxy transparently forwards all requests sent to it to the target server specified by `base_url`.
16 |
17 | For example, suppose you set `base_url` to `http://www.example.com/foo/` and the proxy server is launched at `http://127.0.0.1:8000`.
18 |
19 | Then all requests sent to the proxy server will be forwarded to `http://www.example.com/foo/`, including `path-params`, `query-params`, `headers`, `cookies`, etc.
20 |
21 | Visiting `http://127.0.0.1:8000/bar?baz=1` will get the response from `http://www.example.com/foo/bar?baz=1`.
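22 | 
23 | For example, a minimal app that produces this mapping, using the [`reverse_http_app`][fastapi_proxy_lib.fastapi.app.reverse_http_app] helper described on the FastAPI-Helper page:
24 | 
25 | ```python
26 | from fastapi_proxy_lib.fastapi.app import reverse_http_app
27 | 
28 | # forwards every request it receives to `http://www.example.com/foo/`
29 | app = reverse_http_app(base_url="http://www.example.com/foo/")
30 | 
31 | # if this file is saved as `main.py`, run: `uvicorn main:app --host 127.0.0.1 --port 8000`
32 | ```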
22 |
23 | ## What is a forward proxy?
24 |
25 | A forward proxy is very similar to a reverse proxy, except that the forward proxy server uses the full `path` of the requests it receives as the `base_url`.
26 |
27 | For example, visiting `http://127.0.0.1:8000/http://www.example.com/foo/bar?baz=1` will get the response from `http://www.example.com/foo/bar?baz=1`.
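28 | 
29 | A forward proxy app can be set up the same way; the following is a sketch assuming the `forward_http_app` helper in `fastapi_proxy_lib.fastapi.app` (check the API reference for its exact parameters, e.g. the `proxy_filter` discussed on the Security page):
30 | 
31 | ```python
32 | from fastapi_proxy_lib.fastapi.app import forward_http_app
33 | 
34 | # the target url is taken from the request path, e.g. `/http://www.example.com/foo/bar`
35 | app = forward_http_app()
36 | 
37 | # if this file is saved as `main.py`, run: `uvicorn main:app --host 127.0.0.1 --port 8000`
38 | ```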
28 |
--------------------------------------------------------------------------------
/docs/images/rocket-24.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | --8<-- "README.md"
2 |
--------------------------------------------------------------------------------
/docs_src/advanced/modify-request.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Generator
2 | from typing import Any
3 |
4 | import httpx
5 | from httpx import Request
6 |
7 | from fastapi_proxy_lib.fastapi.app import reverse_http_app
8 |
9 |
10 | class MyCustomAuth(httpx.Auth):
11 | # ref: https://www.python-httpx.org/advanced/#customizing-authentication
12 |
13 | def __init__(self, token: str):
14 | self.token = token
15 |
16 | def auth_flow(self, request: httpx.Request) -> Generator[Request, Any, None]:
17 | # Send the request, with a custom `X-Authentication` header.
18 | request.headers["X-Authentication"] = self.token
19 | yield request
20 |
21 |
22 | app = reverse_http_app(
23 | client=httpx.AsyncClient(auth=MyCustomAuth("bearer_token")),
24 | base_url="http://www.httpbin.org/",
25 | )
26 |
--------------------------------------------------------------------------------
/docs_src/advanced/modify-response-particular.py:
--------------------------------------------------------------------------------
1 | from collections.abc import AsyncIterator
2 | from contextlib import asynccontextmanager
3 |
4 | from fastapi import FastAPI
5 | from starlette.requests import Request
6 |
7 | from fastapi_proxy_lib.core.http import ReverseHttpProxy
8 |
9 | proxy = ReverseHttpProxy(base_url="http://httpbin.org/")
10 |
11 |
12 | @asynccontextmanager
13 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]:
14 | """Close proxy."""
15 | yield
16 | await proxy.aclose()
17 |
18 |
19 | app = FastAPI(lifespan=close_proxy_event)
20 |
21 |
22 | @app.get("/{path:path}")
23 | @app.post("/{path:path}")
24 | async def _(request: Request, path: str = ""):
25 | if path == "ip" and request.method == "GET":
26 | return {"msg": "Method is redefined"}
27 | else:
28 | return await proxy.proxy(request=request, path=path)
29 |
--------------------------------------------------------------------------------
/docs_src/advanced/modify-response.py:
--------------------------------------------------------------------------------
1 | from collections.abc import AsyncIterator
2 | from contextlib import asynccontextmanager
3 |
4 | from fastapi import FastAPI
5 | from starlette.requests import Request
6 | from starlette.responses import AsyncContentStream, StreamingResponse
7 |
8 | from fastapi_proxy_lib.core.http import ReverseHttpProxy
9 |
10 | proxy = ReverseHttpProxy(base_url="http://www.example.com/")
11 |
12 |
13 | @asynccontextmanager
14 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]:
15 | """Close proxy."""
16 | yield
17 | await proxy.aclose()
18 |
19 |
20 | app = FastAPI(lifespan=close_proxy_event)
21 |
22 |
23 | async def new_content(origin_content: AsyncContentStream) -> AsyncContentStream:
24 | """Fake content processing."""
25 | async for chunk in origin_content:
26 | # do some processing with chunk, e.g transcoding,
27 | # here we just print and return it as an example.
28 | print(chunk)
29 | yield chunk
30 |
31 |
32 | @app.get("/{path:path}")
33 | async def _(request: Request, path: str = ""):
34 | proxy_response = await proxy.proxy(request=request, path=path)
35 |
36 | if isinstance(proxy_response, StreamingResponse):
37 | # get the origin content stream
38 | old_content = proxy_response.body_iterator
39 |
40 | new_resp = StreamingResponse(
41 | content=new_content(old_content),
42 | status_code=proxy_response.status_code,
43 | headers=proxy_response.headers,
44 | media_type=proxy_response.media_type,
45 | )
46 | return new_resp
47 |
48 | return proxy_response
49 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | # yaml-language-server: $schema=https://squidfunk.github.io/mkdocs-material/schema.json
2 |
3 | site_name: fastapi-proxy-lib
4 | site_url: https://WSH032.github.io/fastapi-proxy-lib/
5 |
6 | repo_url: https://github.com/WSH032/fastapi-proxy-lib/
7 | repo_name: WSH032/fastapi-proxy-lib
8 |
9 | edit_uri: edit/main/docs/
10 |
11 | theme:
12 | name: material
13 | icon:
14 | logo: octicons/rocket-24
15 | favicon: images/rocket-24.svg
16 | features:
17 | - content.code.copy
18 | - content.code.annotate
19 | - navigation.instant
20 | - navigation.instant.progress
21 | - navigation.tabs
22 | - search.suggest
23 | - search.highlight
24 | - search.share
25 | - navigation.footer
26 | - content.action.edit
27 | - content.action.view
28 | - content.tabs.link
29 | - content.tooltips
30 | - navigation.top
31 | # - navigation.expand
32 | # - navigation.tracking
33 | # https://squidfunk.github.io/mkdocs-material/setup/changing-the-colors/#system-preference
34 | palette:
35 | # Palette toggle for light mode
36 | - media: "(prefers-color-scheme: light)"
37 | scheme: default
38 | toggle:
39 | icon: material/brightness-7
40 | name: Switch to dark mode
41 | # Palette toggle for dark mode
42 | - media: "(prefers-color-scheme: dark)"
43 | scheme: slate
44 | toggle:
45 | icon: material/brightness-4
46 | name: Switch to light mode
47 |
48 | markdown_extensions:
49 | # Python Markdown
50 | - abbr
51 | - admonition
52 | - attr_list
53 | - def_list
54 | - footnotes
55 | - md_in_html
56 | - tables
57 | - toc:
58 | permalink: true
59 |
60 | # Python Markdown Extensions
61 | - pymdownx.arithmatex:
62 | generic: true
63 | - pymdownx.betterem:
64 | smart_enable: all
65 | - pymdownx.critic
66 | - pymdownx.caret
67 | - pymdownx.details
68 | - pymdownx.emoji:
69 | emoji_index: !!python/name:material.extensions.emoji.twemoji
70 | emoji_generator: !!python/name:material.extensions.emoji.to_svg
71 | - pymdownx.highlight:
72 | anchor_linenums: true
73 | line_spans: __span
74 | pygments_lang_class: true
75 | - pymdownx.snippets
76 | - pymdownx.inlinehilite
77 | - pymdownx.keys
78 | - pymdownx.mark
79 | - pymdownx.smartsymbols
80 | - pymdownx.superfences
81 | # - pymdownx.superfences:
82 | # custom_fences:
83 | # - name: mermaid
84 | # class: mermaid
85 | # format: !!python/name:pymdownx.superfences.fence_code_format
86 | - pymdownx.tabbed:
87 | alternate_style: true
88 | slugify: !!python/object/apply:pymdownx.slugs.slugify
89 | kwds:
90 | case: lower
91 | - pymdownx.tasklist:
92 | custom_checkbox: true
93 | - pymdownx.tilde
94 |
95 | plugins:
96 | # - offline
97 | - search
98 | - gen-files:
99 | scripts:
100 | - scripts/gen_ref_pages.py
101 | - literate-nav:
102 | nav_file: SUMMARY.md
103 | - section-index
104 | - mkdocstrings:
105 | default_handler: python
106 | handlers:
107 | python:
108 | import:
109 | - https://frankie567.github.io/httpx-ws/objects.inv
110 | - https://fastapi.tiangolo.com/objects.inv
111 | options:
112 | docstring_style: google
113 | paths: [src]
114 | # Remember: https://github.com/timvink/mkdocs-git-revision-date-localized-plugin#note-when-using-build-environments
115 | - git-revision-date-localized:
116 | fallback_to_build_date: true
117 | enable_creation_date: true
118 | type: timeago
119 | - git-committers:
120 | repository: WSH032/fastapi-proxy-lib
121 | branch: main
122 |
123 | extra:
124 | social:
125 | - icon: fontawesome/brands/github
126 | link: https://github.com/WSH032/
127 |
128 | watch:
129 | - src/fastapi_proxy_lib
130 | - README.md
131 | - CONTRIBUTING.md
132 | - CHANGELOG.md
133 | - docs_src/
134 |
135 | validation:
136 | omitted_files: warn
137 | absolute_links: warn
138 | unrecognized_links: warn
139 |
140 | # Don't change the name "reference/"
141 | # It's used in scripts/gen_ref_pages.py
142 | nav:
143 | - Home: index.md
144 | - Usage:
145 | - Usage/index.md
146 | - Usage/FastAPI-Helper.md
147 | - Usage/Advanced.md
148 | - Usage/Security.md
149 | - API Reference: reference/
150 | - CONTRIBUTING:
151 | - CONTRIBUTING/CONTRIBUTING.md
152 | - CHANGELOG:
153 | - CHANGELOG/CHANGELOG.md
154 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling >= 1.13.0", "hatch-regex-commit"]
3 | build-backend = "hatchling.build"
4 |
5 | # https://hatch.pypa.io/latest/config/metadata/
6 | [project]
7 | name = "fastapi-proxy-lib"
8 | requires-python = ">=3.9"
9 | readme = "README.md"
10 | license = { file = "LICENSE" }
11 | authors = [
12 | { name = "Sean Wang", email = "126865849+WSH032@users.noreply.github.com" },
13 | ]
14 | description = "HTTP/WebSocket proxy for starlette/FastAPI."
15 | keywords = [
16 | "proxy",
17 | "reverse-proxy",
18 | "forward-proxy",
19 | "http",
20 | "websocket",
21 | "asgi",
22 | "starlette",
23 | "fastapi",
24 | "httpx",
25 | "httpx-ws",
26 | ]
27 | # https://pypi.org/classifiers/
28 | classifiers = [
29 | "License :: OSI Approved :: Apache Software License",
30 | "Operating System :: OS Independent",
31 | "Intended Audience :: Developers",
32 | "Programming Language :: Python :: 3.9",
33 | "Programming Language :: Python :: 3.10",
34 | "Programming Language :: Python :: 3.11",
35 | "Programming Language :: Python :: 3.12",
36 | "Programming Language :: Python :: 3.13",
37 | "Programming Language :: Python :: 3 :: Only",
38 | "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
39 | "Topic :: Internet :: WWW/HTTP :: Session",
40 | "Environment :: Web Environment",
41 | "Framework :: AsyncIO",
42 | "Framework :: FastAPI",
43 | "Typing :: Typed",
44 | ]
45 |
46 | dynamic = ["version"]
47 |
48 | # NOTE: version constraints
49 | # https://iscinumpy.dev/post/bound-version-constraints/
50 |
51 | dependencies = [
52 | "httpx",
53 | "httpx-ws >= 0.7.1",
54 | "starlette",
55 | "typing_extensions >=4.5.0",
56 | ]
57 |
58 | [project.optional-dependencies]
59 | standard = ["fastapi"]
60 | all = ["fastapi-proxy-lib[standard]"]
61 |
62 | [project.urls]
63 | Documentation = "https://WSH032.github.io/fastapi-proxy-lib/"
64 | "Source code" = "https://github.com/WSH032/fastapi-proxy-lib"
65 |
66 |
67 | [tool.hatch.version]
68 | # refer to: https://github.com/frankie567/hatch-regex-commit
69 | source = "regex_commit"
70 | commit_extra_args = ["-e"]
71 | path = "src/fastapi_proxy_lib/__init__.py"
72 | # NOTE: `chore` is required by commitlint
73 | commit_message = "chore(version): 🚀 bump version v{current_version} → v{new_version}"
74 | tag_message = "🚀 bump version v{current_version} → v{new_version}"
75 | # NOTE: `v` prefix is required by github `publish.yml` action
76 | tag_name = "v{new_version}"
77 | check_dirty = false
78 |
79 |
80 | [tool.hatch.envs.default]
81 | path = ".venv" # for vscode auto selecting python interpreter
82 | features = ["standard"] # NOTE: Don't use Self-referential
83 | dependencies = [
84 | # NOTE: 👇
85 |     # The versions of `black`, `ruff`, and `codespell` must be consistent with the `.pre-commit-config.yaml`.
86 | # Don't edit them manually, use `pre-commit run ver_sync` instead.
87 | "black==25.1.0",
88 | "ruff==0.11.2",
89 | "codespell==2.4.1",
90 | # Don't write comments on these lines, because they will be removed by `pre-commit run ver_sync`.
91 | # NOTE: 👆
92 |
93 | # lint-check
94 | "pyright == 1.1.356", # pyright must be installed in the runtime environment
95 | # test
96 | "pytest == 7.*",
97 | "pytest-cov == 4.*",
98 | "uvicorn[standard] < 1.0.0", # TODO: Once it releases version 1.0.0, we will remove this restriction.
99 | "httpx[http2]", # we don't set version here, instead set it in `[project].dependencies`.
100 | "anyio", # we don't set version here, because fastapi has a dependency on it
101 | "asgi-lifespan==2.*",
102 | "pytest-timeout==2.*",
103 | ]
104 |
105 | [tool.hatch.envs.default.scripts]
106 | # https://pytest-cov.readthedocs.io/en/latest/config.html
107 | # xml for codecov, html for local review
108 | test = "pytest tests/ --cov --cov-report=xml --cov-report=html"
109 | # ruff must run before black
110 | lint = ["- ruff check --fix .", "- black .", "- pyright .", "- codespell . -i 3 -w"]
111 | lint-check = [
112 | "ruff check .",
113 | "black --check .",
114 | "pyright .",
115 | "pyright --verifytypes fastapi_proxy_lib --ignoreexternal",
116 | "codespell .",
117 | ]
118 | check-all = ["lint-check", "test"]
119 |
120 | # https://hatch.pypa.io/latest/config/environment/overview/#detached-environments
121 | [tool.hatch.envs.docs]
122 | path = ".venv-docs"
123 | detached = true
124 | dependencies = [
125 | "mkdocs-material == 9.*",
126 | "mkdocstrings[python] == 0.27.*",
127 | "mkdocs-gen-files == 0.5.*",
128 | "mkdocs-literate-nav == 0.6.*",
129 | "mkdocs-section-index == 0.3.*",
130 | "mkdocs-git-revision-date-localized-plugin == 1.*",
131 | "mkdocs-git-committers-plugin-2 == 2.*",
132 | ]
133 |
134 | [tool.hatch.envs.docs.scripts]
135 | docs-serve = "mkdocs serve"
136 | docs-build = "mkdocs build"
137 |
138 |
139 | # https://docs.astral.sh/ruff/rules/
140 | [tool.ruff.lint]
141 | select = [
142 | "E", # pycodestyle errors
143 | "W", # pycodestyle warnings
144 | "F", # pyflakes
145 | "I", # isort
146 | "C4", # flake8-comprehensions
147 | "B", # flake8-bugbear
148 | "N", # pep8-naming
149 | "UP", # pyupgrade
150 | "D", # pydocstyle
151 | "SIM", # flake8-simplify
152 | "RUF", # unused-noqa
153 | "Q", # flake8-quotes
154 | "C90", # McCabe complexity
155 | "ANN001", # missing-type-function-argument
156 | "ANN201", # missing-return-type-undocumented-public-function
157 | "ASYNC", # flake8-async
158 | "A", # flake8-builtins
159 | "COM", # flake8-commas
160 | "ISC", # flake8-implicit-str-concat
161 | "ICN001", # unconventional-import-alias
162 | "PIE", # flake8-pie
163 | "PT", # flake8-pytest-style
164 | "INT", # flake8-gettext
165 | "ARG", # flake8-unused-arguments
166 | "PGH004", # blanket-noqa
167 | "TRY201", # Use raise without specifying exception name
168 | "NPY", # NumPy-specific rules
169 | "PD", # pandas-vet
170 | "PERF", # Perflint
171 | "PL", # Pylint
172 | "TID252", # Relative imports from parent modules are banned
173 | ]
174 | ignore = [
175 | "E501", # line too long, handled by black
176 | "COM812", # missing-trailing-comma
177 | "PERF203", # try-except within a loop incurs performance overhead
178 | "PLR2004", # magic-value-comparison
179 | "PLR5501", # collapsible-else-if
180 | "PLW0120", # useless-else-on-loop
181 | "PLR0911", # too-many-return-statements
182 | "PLR0913", # too-many-arguments
183 | "PLC0205", # single-string-slots
184 | "PLW0603", # global-statement
185 | "PLC1901", # compare-to-empty-string
186 | "PLR0904", # too-many-public-methods
187 | "RUF002", # ambiguous-unicode-character-docstring
188 | "RUF003", # ambiguous-unicode-character-comment
189 | "SIM105", # suppressible-exception # slower
190 | # "D418", # Function decorated with `@overload` shouldn't contain a docstring
191 | # "SIM108", # if-else-block-instead-of-if-exp
192 | ]
193 |
194 | [tool.ruff.lint.per-file-ignores]
195 | "docs_src/**/*.py" = ["D"]
196 |
197 | # https://docs.astral.sh/ruff/settings/#pydocstyle
198 | [tool.ruff.lint.pydocstyle]
199 | convention = "google"
200 |
201 | # [tool.ruff.flake8-tidy-imports]
202 | # ban-relative-imports = "all"
203 |
204 |
205 | # https://microsoft.github.io/pyright/#/configuration
206 | [tool.pyright]
207 | typeCheckingMode = "strict"
208 | pythonVersion = "3.9"
209 | reportUnusedImport = "warning"
210 | reportUnusedFunction = "warning"
211 | reportUnusedExpression = "warning"
212 | reportUnusedVariable = "warning"
213 | reportUnnecessaryTypeIgnoreComment = true
214 | reportPrivateUsage = "warning"
215 | reportUnnecessaryIsInstance = "warning"
216 | reportIncompatibleMethodOverride = "warning"
217 | reportMissingTypeArgument = true
218 | reportMissingParameterType = true
219 |
220 |
221 | # https://coverage.readthedocs.io/en/7.3.2/config.html#run
222 | [tool.coverage.run]
223 | branch = true
224 | source = ['fastapi_proxy_lib']
225 |
226 | # https://coverage.readthedocs.io/en/7.3.2/config.html#report
227 | [tool.coverage.report]
228 | exclude_also = [
229 | "if __name__ == .__main__.:",
230 | "if TYPE_CHECKING:",
231 | "raise NotImplementedError",
232 | "class .*\\bProtocol\\):",
233 | "@(abc\\.)?abstractmethod",
234 | # # deprecated code will not be tested
235 | "@(typing_extensions\\.)?deprecated",
236 | # `overload` just for type hint, will not be tested
237 | "@(typing_extensions\\.)?overload",
238 | "@(typing\\.)?overload",
239 | "raise AssertionError",
240 | ]
241 |
242 |
243 | [tool.codespell]
244 | # https://github.com/codespell-project/codespell/issues/1887
245 | skip = "./htmlcov,./site"
246 |
247 |
248 | [tool.pytest.ini_options]
249 | timeout = 15
250 |
--------------------------------------------------------------------------------
/scripts/gen_ref_pages.py:
--------------------------------------------------------------------------------
1 | # pyright: basic
2 |
3 | """Generate the code reference pages and navigation.
4 |
5 | Copied from: https://mkdocstrings.github.io/recipes/
6 |
7 | NOTE: Keep the following directory structure:
8 |
9 | 📁 repo/
10 | ├── 📁 docs/
11 | │ └── 📄 index.md
12 | ├── 📁 scripts/
13 | │ └── 📄 gen_ref_pages.py
14 | ├── 📁 src/
15 | │ └── 📁 project/
16 | └── 📄 mkdocs.yml
17 | """
18 |
19 | from pathlib import Path
20 |
21 | import mkdocs_gen_files # type: ignore
22 |
23 | nav = mkdocs_gen_files.Nav()
24 |
25 | SRC = Path(__file__).parent.parent / "src"
26 | INDEX_MD_NAME = "index.md"
27 |
28 | for path in sorted(SRC.rglob("*.py")):
29 | module_path = path.relative_to(SRC).with_suffix("")
30 | doc_path = path.relative_to(SRC).with_suffix(".md")
31 | # Don't change the name "reference"
32 | # It's used in mkdocs.yml
33 | full_doc_path = Path("reference", doc_path)
34 |
35 | parts = tuple(module_path.parts)
36 |
37 | if parts[-1] == "__init__":
38 | parts = parts[:-1]
39 | doc_path = doc_path.with_name(INDEX_MD_NAME)
40 | full_doc_path = full_doc_path.with_name(INDEX_MD_NAME)
41 | elif parts[-1].startswith("_"):
42 | continue
43 |
44 | nav[parts] = doc_path.as_posix()
45 |
46 | with mkdocs_gen_files.open(full_doc_path, "w") as fd:
47 | ident = ".".join(parts)
48 | fd.writelines(f"::: {ident}")
49 |
50 | mkdocs_gen_files.set_edit_path(full_doc_path, Path("../") / path)
51 |
52 | # Don't change the name "reference/SUMMARY.md"
53 | # It's used in mkdocs.yml
54 | with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
55 | nav_file.writelines(nav.build_literate_nav())
56 |
--------------------------------------------------------------------------------
/scripts/pre_commit_scripts/ver_sync.py:
--------------------------------------------------------------------------------
1 | # pyright: basic
2 |
3 | """Maintain lint tools version consistency between `.pre-commit-config.yaml` and `pyproject.toml`."""
4 |
5 |
6 | # https://packaging.pypa.io/en/stable/requirements.html
7 | # https://yaml.readthedocs.io/en/latest/example/
8 | # https://tomlkit.readthedocs.io/en/latest/quickstart/
9 | # https://hatch.pypa.io/latest/config/environment/overview/#dependencies
10 |
11 |
12 | import sys
13 | from pathlib import Path
14 | from typing import (
15 | Any,
16 | Union,
17 | )
18 |
19 | import tomlkit # type: ignore
20 | import tomlkit.items # type: ignore
21 | from packaging.requirements import Requirement # type: ignore
22 | from packaging.specifiers import SpecifierSet # type: ignore
23 | from packaging.version import Version # type: ignore
24 | from ruamel.yaml import YAML # type: ignore
25 |
26 | yaml = YAML(typ="safe")
27 |
28 | pre_commit_config_yaml_path = Path(".pre-commit-config.yaml")
29 | pyproject_toml_path = Path("pyproject.toml")
30 |
31 | RepoType = dict[str, Any]
32 | HookType = dict[str, Any]
33 |
34 | if __name__ == "__main__":
35 |     # NOTE: these three key names should correspond to the values in
36 |     # pyproject_toml["tool"]["hatch"]["envs"]["default"]["dependencies"]
37 | vers_in_pre_commit: dict[str, Union[None, str]] = {
38 | "ruff": None,
39 | "black": None,
40 | "codespell": None,
41 | }
42 |
43 |     # Find the versions pinned in pre-commit-config.yaml
44 | pre_commit_yaml = yaml.load(pre_commit_config_yaml_path)
45 | repos_lst: list[RepoType] = pre_commit_yaml["repos"]
46 |
47 | for repo in repos_lst:
48 | hooks_lst: list[HookType] = repo["hooks"]
49 |         hook = hooks_lst[0]  # the specially marked repos have only one hook
50 |         hook_alias = hook.get("alias")  # only the specially marked hooks have an alias
51 | if hook_alias is None:
52 | continue
53 | if hook_alias in vers_in_pre_commit:
54 | vers_in_pre_commit[hook_alias] = repo["rev"]
55 |
56 |     # Check that the versions are valid
57 | new_vers: dict[str, Version] = {}
58 | for name, ver in vers_in_pre_commit.items():
59 | if not isinstance(ver, str):
60 | sys.exit(f"Error: version of `{name}` not found in pre-commit-config.yaml")
61 | try:
62 | new_vers[name] = Version(ver)
63 | except Exception as e:
64 | sys.exit(f"{e}: version of `{name}` in pre-commit-config.yaml is invalid")
65 |
66 |     # Update the versions in pyproject.toml
67 |     with open(pyproject_toml_path, "rb") as f:  # NOTE: open in binary mode
68 | pyproject_toml = tomlkit.load(f)
69 | requir_lst = pyproject_toml["tool"]["hatch"]["envs"]["default"]["dependencies"] # type: ignore
70 | assert isinstance(requir_lst, tomlkit.items.Array)
71 |
72 | for idx, require in enumerate(requir_lst):
73 | assert isinstance(require, tomlkit.items.String)
74 | parsed_requir = Requirement(require)
75 | if parsed_requir.name in new_vers:
76 |             # Update the version
77 | parsed_requir.specifier = SpecifierSet(
78 | f"=={new_vers.pop(parsed_requir.name)}"
79 | )
80 | requir_lst[idx] = str(parsed_requir)
81 |
82 |     # The pop calls above should have consumed every dependency; if the dict is not empty here, something went wrong
83 | if new_vers:
84 | sys.exit(
85 | f"Error: version of `{new_vers.popitem()}` not found in pyproject.toml"
86 | )
87 |
88 |     # If there was no error, write the update back
89 | pyproject_toml["tool"]["hatch"]["envs"]["default"]["dependencies"] = requir_lst # type: ignore
90 |
91 | with open(pyproject_toml_path, "wb") as f:
92 | toml_str = tomlkit.dumps(pyproject_toml)
93 |         f.write(toml_str.encode("utf-8"))  # NOTE: write utf-8 bytes, otherwise the text formatting gets mangled
94 |
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/__init__.py:
--------------------------------------------------------------------------------
1 | """fastapi_proxy_lib package."""
2 |
3 | # DO NOT EDIT THE `__version__` MANUALLY.
4 | # Use `hatch version {new_version}` instead.
5 | # Refer to `CONTRIBUTING.md` for more info.
6 | __version__ = "0.3.0"
7 |
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/core/__init__.py:
--------------------------------------------------------------------------------
1 | """Proxy core package."""
2 |
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/core/_model.py:
--------------------------------------------------------------------------------
1 | """The data model for both http proxy and websocket proxy."""
2 |
3 | import abc
4 | from typing import (
5 | Any,
6 | Optional,
7 | )
8 |
9 | from httpx import AsyncClient
10 |
11 | __all__ = ("BaseProxyModel",)
12 |
13 |
14 | class BaseProxyModel(abc.ABC):
15 | """Http proxy base ABC class.
16 |
17 | Attributes:
18 | client: The httpx.AsyncClient to send http requests.
19 |         follow_redirects: Whether to follow redirects of the proxy server.
20 | """
21 |
22 | client: AsyncClient
23 | follow_redirects: bool
24 |
25 | def __init__(
26 | self, client: Optional[AsyncClient] = None, *, follow_redirects: bool = False
27 | ) -> None:
28 | """Http proxy base class.
29 |
30 | Args:
31 | client: The httpx.AsyncClient to send http requests. Defaults to None.
32 |                 If None, a new httpx.AsyncClient will be created,
33 |                 else the given httpx.AsyncClient will be used.
34 |             follow_redirects: Whether to follow redirects of the proxy server. Defaults to False.
35 | """
36 | self.client = client if client is not None else AsyncClient()
37 | self.follow_redirects = follow_redirects
38 |
39 | async def aclose(self) -> None:
40 | """Close AsyncClient.
41 |
42 | Equal to:
43 | await self.client.aclose()
44 | """
45 | await self.client.aclose()
46 |
47 | @abc.abstractmethod
48 | async def send_request_to_target(self, *args: Any, **kwargs: Any) -> Any:
49 | """Abstract method to send request to target server.
50 |
51 | Subclass must implement this method.
52 |         - Should accept the original request from the client, such as starlette.requests.Request | starlette.websockets.WebSocket.
53 |         - Then adjust the request, e.g. change the host of the request to the target proxy server.
54 |         - Then send the request to the target proxy server.
55 | - Response:
56 | - If is http proxy response, should return starlette.responses.Response.
57 | - If is websocket proxy, just establish the connection between client and target server.
58 | """
59 | raise NotImplementedError()
60 |
61 | @abc.abstractmethod
62 | async def proxy(self, *args: Any, **kwargs: Any) -> Any:
63 | """A user-facing high-level method that encapsulates the `self.send_request_to_target()` method.
64 |
65 | Receives the raw incoming parameters from the app,
66 | processes the parameters before passing them to the `self.send_request_to_target()` method,
67 | or independently determines the specific internal implementation.
68 | Its return value should be consistent with that of send_request_to_target.
69 | """
70 | raise NotImplementedError()
71 |
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/core/_tool.py:
--------------------------------------------------------------------------------
1 | """The utils tools for both http proxy and websocket proxy."""
2 |
3 | import ipaddress
4 | import logging
5 | import warnings
6 | from collections.abc import Iterable, Mapping
7 | from functools import lru_cache
8 | from textwrap import dedent
9 | from typing import (
10 | Any,
11 | Optional,
12 | Protocol,
13 | TypedDict,
14 | TypeVar,
15 | Union,
16 | )
17 |
18 | import httpx
19 | from starlette import status
20 | from starlette.background import BackgroundTask as BackgroundTask_t
21 | from starlette.datastructures import (
22 | Headers as StarletteHeaders,
23 | )
24 | from starlette.datastructures import (
25 | MutableHeaders as StarletteMutableHeaders,
26 | )
27 | from starlette.responses import JSONResponse
28 | from starlette.types import Scope
29 | from typing_extensions import deprecated, overload
30 |
31 | __all__ = (
32 | "BaseURLError",
33 | "ErrMsg",
34 | "ErrRseponseJson",
35 | "ProxyFilterProto",
36 | "_RejectedProxyRequestError",
37 | "check_base_url",
38 | "check_http_version",
39 | "default_proxy_filter",
40 | "lru_get_url",
41 | "reset_lru_get_url",
42 | "return_err_msg_response",
43 | "warn_for_none_filter",
44 | )
45 |
46 | _logger = logging.getLogger(__name__)
47 |
48 | #################### Constant ####################
49 |
50 |
51 | #################### Data Model ####################
52 |
53 |
54 | _ProxyFilterTypeVar = TypeVar("_ProxyFilterTypeVar", bound="ProxyFilterProto")
55 |
56 |
57 | class ProxyFilterProto(Protocol):
58 | """All proxy filter must implement like this."""
59 |
60 | def __call__(self, url: httpx.URL, /) -> Union[None, str]:
61 |         """Decide whether to accept the proxy request for the given url.
62 |
63 | Examples:
64 | Refer to [`default_proxy_filter`][fastapi_proxy_lib.core._tool.default_proxy_filter]
65 |
66 | Args:
67 | url: The target url of the client request to proxy.
68 |
69 | Returns:
70 | None: should accept the proxy request.
71 |             str: should reject the proxy request.
72 |                 The `str` is the reason for rejection.
73 | """
74 |
75 |
76 | class LoggerProtocol(Protocol):
77 | """Like logging.error() ."""
78 |
79 | def __call__(
80 | self,
81 | *,
82 | msg: object,
83 | exc_info: Union[BaseException, None, bool],
84 | ) -> Any: ...
85 |
86 |
87 | class ErrMsg(TypedDict):
88 | """A error message of response.
89 |
90 | Attributes:
91 | err_type: equal to {type(error).__name__}.
92 | msg: equal to {str(error)}.
93 | """
94 |
95 |     # NOTE: the `err_type` and `msg` keys are part of the api design
96 | err_type: str
97 | msg: str
98 |
99 |
100 | class ErrRseponseJson(TypedDict):
101 | """A json-like dict for return by `JSONResponse`.
102 |
103 | Something like:
104 | ```json
105 | {
106 | "detail": {
107 | "err_type": "RuntimeError",
108 | "msg": "Something wrong."
109 | }
110 | }
111 | ```
112 | """
113 |
114 | # https://fastapi.tiangolo.com/tutorial/handling-errors/#httpexception
115 |     # NOTE: the `detail` key is part of the api design
116 | detail: ErrMsg
117 |
118 |
119 | #################### Error ####################
120 |
121 |
122 | class BaseURLError(ValueError):
123 | """Invalid URL."""
124 |
125 |
126 | """Errors prefixed with '_' are usually returned to the client, rather than handled inside python."""
127 |
128 |
129 | class _RejectedProxyRequestError(RuntimeError):
130 | """Should be raised when reject proxy request."""
131 |
132 |
133 | class _UnsupportedHttpVersionError(RuntimeError):
134 | """Unsupported http version."""
135 |
136 |
137 | #################### Tools ####################
138 |
139 |
140 | @deprecated(
141 | "May or may not be removed in the future.", category=PendingDeprecationWarning
142 | )
143 | def reset_lru_get_url(maxsize: Union[int, None] = 128, typed: bool = False) -> None:
144 | """Reset the parameters or clear the cache of `lru_get_url`.
145 |
146 | Args:
147 | maxsize: The same as `functools.lru_cache`.
148 | typed: The same as `functools.lru_cache`.
149 | """
150 | global _lru_get_url
151 | _lru_get_url.cache_clear()
152 | _lru_get_url = lru_cache(maxsize, typed)(_lru_get_url.__wrapped__)
153 |
154 |
155 | @deprecated(
156 | "May or may not be removed in the future.", category=PendingDeprecationWarning
157 | )
158 | @lru_cache(maxsize=1024)
159 | def _lru_get_url(url: str) -> httpx.URL:
160 | return httpx.URL(url)
161 |
162 |
163 | @deprecated(
164 | "May or may not be removed in the future.", category=PendingDeprecationWarning
165 | )
166 | def lru_get_url(url: str) -> httpx.URL:
167 | """Lru cache for httpx.URL(url)."""
168 |     # The lru cache returns a mutable object, so we need to return a copy here
169 | return _lru_get_url(url).copy_with()
170 |
171 |
172 | def check_base_url(base_url: Union[httpx.URL, str], /) -> httpx.URL:
173 | """Check and format base_url.
174 |
175 | - Time consumption: 56.2 µs ± 682 ns.
176 |
177 | Args:
178 | base_url: url that need to be checked and formatted.
179 | - If base_url is a str, it will be converted to httpx.URL.
180 |
181 | Raises:
182 | BaseURLError:
183 | - if base_url does not contain {scheme} or {netloc}.
184 | - if base_url does not ends with "/".
185 |
186 | Returns:
187 | `base_url.copy_with(query=None, fragment=None)`
188 | - The copy of original `base_url`.
189 |
190 | Examples:
191 | r = check_base_url("https://www.example.com/p0/p1?q=1")
192 | assert r == "https://www.example.com/p0/"
193 |
194 | The components of a URL are broken down like this:
195 | https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink
196 | [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment]
197 | [ userinfo ] [ netloc ][ raw_path ]
198 | """
199 | example_url = "https://www.example.com/path/"
200 |
201 |     # Avoid modifying the original base_url
202 | base_url = (
203 | base_url.copy_with() if isinstance(base_url, httpx.URL) else httpx.URL(base_url)
204 | )
205 |
206 | if not base_url.scheme or not base_url.netloc:
207 | raise BaseURLError(
208 | dedent(
209 | f"""\
210 | `base_url` must contain scheme and netloc,
211 | e.g. {example_url}
212 | got: {base_url}\
213 | """
214 | )
215 | )
216 |
217 |     # NOTE: prefer URL.copy_with() over URL.join() for modifying the URL, because the latter performs worse
218 |
219 | if base_url.query or base_url.fragment:
220 | base_url = base_url.copy_with(query=None, fragment=None)
221 | warnings.warn(
222 | dedent(
223 | f"""\
224 | `base_url` should not contain `query` or `fragment`, which will be ignored.
225 | The `base_url` will be treated as: {base_url}\
226 | """
227 | ),
228 | stacklevel=2,
229 | )
230 |     # There is a reason we force base_url to end with "/":
231 |     # the routes generated by RouterHelper end with "/", and when reverse proxying,
232 |     # the path parameters after "/" are appended to this base_url
233 | if not str(base_url).endswith("/"):
234 | msg = dedent(
235 | f"""\
236 | `base_url` must ends with "/", may be you mean:
237 | {base_url}/\
238 | """
239 | )
240 | raise BaseURLError(msg)
241 |
242 | return base_url
243 |
244 |
245 | # TODO: https://fastapi.tiangolo.com/tutorial/handling-errors/
246 | # Raise an exception and let fastapi handle it automatically, instead of returning a JSONResponse;
247 | # but then background tasks could no longer be used.
248 | def return_err_msg_response(
249 | err: Union[BaseException, ErrMsg],
250 | /,
251 | *,
252 | # JSONResponse 参数
253 | status_code: int,
254 | headers: Optional[Mapping[str, str]] = None,
255 | background: Optional[BackgroundTask_t] = None,
256 | # logger 参数
257 | logger: Optional[LoggerProtocol] = None,
258 | _msg: Optional[Any] = None,
259 | _exc_info: Optional[BaseException] = None,
260 | ) -> JSONResponse:
261 | """Return a JSONResponse with error message and log the error message by logger.
262 |
263 | - logger(msg=_msg, exc_info=_exc_info)
264 | - JSONResponse(
265 | ...,
266 | status_code=status_code,
267 | headers=headers,
268 | background=background,
269 | )
270 |
271 | The error message like:
272 | ```json
273 | {
274 | "detail": {
275 | "err_type": "RuntimeError",
276 | "msg": "Something wrong."
277 | }
278 | }
279 | ```
280 |
281 | Args:
282 | err:
283 | If err is subclass of `BaseException`, it will be converted to `ErrMsg`.
284 | If err is a `ErrMsg`, it will be used directly.
285 |
286 | status_code: The status code of response.
287 | headers: The header of response. Defaults to None.
288 | background: The background task of response. Defaults to None.
289 |
290 | logger: Something like `logging.error`. Defaults to None.
291 | If it is None, will do nothing.
292 | If it is not None, it will be used to log error message.
293 | _msg: The msg to log. Defaults to None.
294 | If it is None, it will be set to `JSONResponse` content.
295 | _exc_info: The detailed error info to log. Defaults to None.
296 | If it is None, will do nothing.
297 | If it is not None, will be passed to logger.
298 |
299 | Raises:
300 | TypeError: If err is not a BaseException or ErrMsg.
301 |
302 | Returns:
303 | JSONResponse about error message.
304 | """
305 | if isinstance(err, BaseException):
306 | detail = ErrMsg(err_type=type(err).__name__, msg=str(err))
307 | else:
308 | detail = err
309 |
310 | err_response_json = ErrRseponseJson(detail=detail)
311 |
312 |     # TODO: note that logging is synchronous and blocks for about 1ms per call, which may cause performance issues,
313 |     # especially for logs written to files; it would be better to run it in asyncio.to_thread
314 |     # https://docs.python.org/zh-cn/3/library/asyncio-task.html#coroutine
315 |
316 | if logger is not None:
317 |         # As long as a logger was passed in, always log
318 | logger(
319 | msg=(
320 | _msg if _msg is not None else err_response_json
321 |             ),  # if _msg is not specified, use the response content
322 | exc_info=_exc_info,
323 | )
324 | else:
325 |         # If no logger was passed in, but a non-None _msg or _exc_info was (i.e. the caller probably wants logging), emit a warning
326 | if _msg is not None or _exc_info is not None:
327 | warnings.warn(
328 | "You should pass logger to record error message, "
329 | "or you can ignore this warning if you don't want to record error message.",
330 | stacklevel=2,
331 | )
332 |
333 | return JSONResponse(
334 | content=err_response_json,
335 | status_code=status_code,
336 | headers=headers,
337 | background=background,
338 | )
339 |
340 |
341 | def check_http_version(
342 | scope: Scope, supported_versions: Iterable[str]
343 | ) -> Union[JSONResponse, None]:
344 | """Check whether the http version of scope is in supported_versions.
345 |
346 | Args:
347 | scope: asgi scope dict.
348 | supported_versions: The supported http versions.
349 |
350 | Returns:
351 | If the http version of scope is not in supported_versions, return a JSONResponse with status_code=505,
352 | else return None.
353 | """
354 | # https://asgi.readthedocs.io/en/latest/specs/www.html#http-connection-scope
355 | # https://asgi.readthedocs.io/en/latest/specs/www.html#websocket-connection-scope
356 | http_version: str = scope.get("http_version", "")
357 |     # If the http version is explicitly specified (i.e. not "") but not among the supported versions, return 505
358 | if http_version not in supported_versions and http_version != "":
359 | error = _UnsupportedHttpVersionError(
360 | f"The request http version is {http_version}, but we only support {supported_versions}."
361 | )
362 |         # TODO: maybe log the value of scope.get("client")
363 | return return_err_msg_response(
364 | error,
365 | status_code=status.HTTP_505_HTTP_VERSION_NOT_SUPPORTED,
366 | logger=_logger.info,
367 | )
368 |
369 |
370 | def default_proxy_filter(url: httpx.URL) -> Union[None, str]:
371 | """Filter by host.
372 |
373 | Reject the following hosts:
374 |
375 | - if the host is ip address, and is not global ip address. e.g:
376 | - `http://127.0.0.1`
377 | - `http://192.168.0.1`
378 | - if the host contains "localhost".
379 |
380 | Warning:
381 |         Time consumption: 3.22~4.7 µs ± 42.6 ns.
382 |
383 | Args:
384 | url: The target url of the client request to proxy.
385 |
386 | Returns:
387 | None: should accept the proxy request.
388 |         str: should reject the proxy request.
389 |             The `str` is the reason for rejection.
390 | """
391 | host = url.host
392 | if "localhost" in host:
393 | return "Deny proxy for localhost."
394 |
395 | try:
396 | ip_address = ipaddress.ip_address(host)
397 | except ValueError:
398 | return None
399 |
400 | if not ip_address.is_global:
401 | return "Deny proxy for non-public IP addresses."
402 |
403 | return None
404 |
405 |
406 | @overload
407 | def warn_for_none_filter(proxy_filter: _ProxyFilterTypeVar) -> _ProxyFilterTypeVar: ...
408 |
409 |
410 | @overload
411 | def warn_for_none_filter(proxy_filter: None) -> ProxyFilterProto: ...
412 |
413 |
414 | def warn_for_none_filter(
415 | proxy_filter: Union[ProxyFilterProto, None],
416 | ) -> ProxyFilterProto:
417 | """Check whether the argument `proxy_filter` is None.
418 |
419 | Args:
420 | proxy_filter: The argument need to be check.
421 |
422 | Returns:
423 | If proxy_filter is None, will warn user and return `default_proxy_filter`.
424 | Else will just return the original argument `proxy_filter`.
425 | """
426 | if proxy_filter is None:
427 | msg = dedent(
428 | """\
429 | The `proxy_filter` is None, which means no filter will be used.
430 | It is not recommended, because it may cause security issues.
431 |
432 | A default proxy filter will be used, which will reject the proxy request:
433 | - if the host of url is ip address, and is not global ip address.
434 |
435 | More info: https://wsh032.github.io/fastapi-proxy-lib/Usage/Security/
436 | """
437 | )
438 | warnings.warn(msg, stacklevel=3)
439 | return default_proxy_filter
440 | else:
441 | return proxy_filter
442 |
443 |
444 | def change_necessary_client_header_for_httpx(
445 | *, headers: StarletteHeaders, target_url: httpx.URL
446 | ) -> StarletteMutableHeaders:
447 | """Change client request headers for sending to proxy server.
448 |
449 | - Change "host" header to `target_url.netloc.decode("ascii")`.
450 | - If "Cookie" header is not in headers,
451 |         will forcibly add an empty "Cookie" header
452 |         to avoid httpx.AsyncClient automatically adding another user's cookiejar.
453 |
454 | Args:
455 | headers: original client request headers.
456 | target_url: httpx.URL of target server url.
457 |
458 | Returns:
459 |         New request headers, a copy of the original input headers.
460 | """
461 | # https://www.starlette.io/requests/#headers
462 | new_headers = headers.mutablecopy()
463 |
464 |     # Update the host header to the host of the target url
465 |     # TODO: looking at the httpx.URL source, netloc is a str encoded to bytes; can we get the str directly to improve performance?
466 | new_headers["host"] = target_url.netloc.decode("ascii")
467 |
468 | # https://developer.mozilla.org/zh-CN/docs/Web/HTTP/Headers/Cookie
469 |
470 | # FIX: https://github.com/WSH032/fastapi-proxy-lib/security/advisories/GHSA-7vwr-g6pm-9hc8
471 | # forcibly set `Cookie` header to avoid httpx.AsyncClient automatically add another user cookiejar
472 | if "Cookie" not in new_headers: # case-insensitive
473 | new_headers["Cookie"] = ""
474 |
475 | return new_headers
476 |
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/core/http.py:
--------------------------------------------------------------------------------
1 | """The http proxy lib."""
2 |
3 | import logging
4 | from textwrap import dedent
5 | from typing import (
6 | Any,
7 | NamedTuple,
8 | NoReturn,
9 | Optional,
10 | Union,
11 | )
12 |
13 | import httpx
14 | from starlette import status as starlette_status
15 | from starlette.background import BackgroundTasks
16 | from starlette.datastructures import (
17 | Headers as StarletteHeaders,
18 | )
19 | from starlette.datastructures import (
20 | MutableHeaders as StarletteMutableHeaders,
21 | )
22 | from starlette.requests import Request as StarletteRequest
23 | from starlette.responses import Response as StarletteResponse
24 | from starlette.responses import StreamingResponse
25 | from typing_extensions import override
26 |
27 | from ._model import BaseProxyModel
28 | from ._tool import (
29 | ProxyFilterProto,
30 |     _RejectedProxyRequestError,  # pyright: ignore [reportPrivateUsage] # private members of this project may be used internally
31 | change_necessary_client_header_for_httpx,
32 | check_base_url,
33 | check_http_version,
34 | return_err_msg_response,
35 | warn_for_none_filter,
36 | )
37 |
38 | __all__ = (
39 | "BaseHttpProxy",
40 | "ForwardHttpProxy",
41 | "ReverseHttpProxy",
42 | )
43 |
44 | _logger = logging.getLogger(__name__)
45 |
46 | #################### Data Model ####################
47 |
48 |
49 | class _ConnectionHeaderParseResult(NamedTuple):
50 | """Parse result of "connection" header.
51 |
52 | Attributes:
53 |         require_close: True if the "connection" header contains a "close" value, else False.
54 |         new_headers: New request headers.
55 |             The "connection" header no longer contains a "close" value but is guaranteed to contain "keep-alive",
56 |             and the "keep-alive" header has been removed.
57 | """
58 |
59 | require_close: bool
60 | new_headers: StarletteMutableHeaders
61 |
62 |
63 | #################### Error ####################
64 |
65 |
66 | """Errors prefixed with '_' are usually returned to the client, rather than handled inside python."""
67 |
68 | # TODO: move these errors into _tool.py
69 |
70 |
71 | class _BadTargetUrlError(ValueError):
72 | """Bad target url of forward http proxy."""
73 |
74 |
75 | class _ReverseProxyServerError(RuntimeError):
76 |     """502 reverse proxy server error."""
77 |
78 |
79 | #################### Constant ####################
80 |
81 | # https://developer.mozilla.org/docs/Web/HTTP/Methods
82 | _NON_REQUEST_BODY_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE")
83 | """The http methods that should not contain request body."""
84 |
85 | # https://asgi.readthedocs.io/en/latest/specs/www.html#http-connection-scope
86 | SUPPORTED_HTTP_VERSIONS = ("1.0", "1.1")
87 | """The http versions that we supported now. It depends on `httpx`."""
88 |
89 | # https://www.python-httpx.org/exceptions/
90 | _400_ERROR_NEED_TO_BE_CATCHED_IN_FORWARD_PROXY = (
91 |     httpx.InvalidURL,  # error while parsing the url
92 |     httpx.UnsupportedProtocol,  # unsupported protocol, e.g. not http or https
93 |     httpx.ProtocolError,  # malformed request or response, e.g. missing host, or response headers violating the spec
94 |     # ValueError,  # an almost catch-all error that may mask errors unrelated to the network
95 | )
96 | """These errors need to be caught.
97 | When:
98 | - client.build_request
99 | - client.send
100 | """
101 | _500_ERROR_NEED_TO_BE_CATCHED_IN_FORWARD_PROXY = (
102 |     httpx.ConnectError,  # we cannot tell whether the connection failed due to a server-side network error, or because the client supplied a wrong, unreachable url
103 | )
104 | """These errors need to be caught and return 5xx status_code.
105 | When:
106 | - client.build_request
107 | - client.send
108 | """
109 |
110 | _502_ERROR_NEED_TO_BE_CATCHED_IN_REVERSE_PROXY = (
111 | httpx.TransportError,
112 | httpx.InvalidURL,
113 | httpx.StreamError,
114 | )
115 | """When these errors occur in reverse proxy server, we think it is error of server."""
116 |
117 |
118 | #################### Tools function ####################
119 |
120 |
121 | def _change_client_header(
122 | *, headers: StarletteHeaders, target_url: httpx.URL
123 | ) -> _ConnectionHeaderParseResult:
124 | """Change client request headers for sending to proxy server.
125 |
126 | - Change "host" header to `target_url.netloc.decode("ascii")`.
127 | - If "Cookie" header is not in headers,
128 |         will forcibly add an empty "Cookie" header
129 |         to avoid httpx.AsyncClient automatically adding another user's cookiejar.
130 | - Will remove "close" value in "connection" header, and add "keep-alive" value to it.
131 | - And remove "keep-alive" header.
132 |
133 | Args:
134 | headers: original client request headers.
135 | target_url: httpx.URL of target server url.
136 |
137 | Returns:
138 | _ConnectionHeaderParseResult:
139 |             require_close: True if the "connection" header contains a "close" value, else False.
140 |             new_headers: New request headers, a **copy** of the original input headers.
141 | """
142 | # https://www.starlette.io/requests/#headers
143 |
144 | new_headers = change_necessary_client_header_for_httpx(
145 | headers=headers, target_url=target_url
146 | )
147 |
148 | # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Connection#syntax
149 |     # NOTE: per the http spec, the "connection" header value lists hop-by-hop headers, and headers are case-insensitive, so it is safe to lowercase them for processing
150 | client_connection_header = [
151 | v.strip() for v in new_headers.get("connection", "").lower().split(",")
152 | ]
153 |
154 |     # Check whether the original request's connection header contains "close"; if so, remove it and record that fact
155 | if "close" in client_connection_header:
156 | whether_require_close = True
157 | client_connection_header.remove("close")
158 | else:
159 | whether_require_close = False
160 |     # Forcibly add "keep-alive" to keep the connection alive
161 | if "keep-alive" not in client_connection_header:
162 | client_connection_header.insert(0, "keep-alive")
163 |     # Write the new connection value back into the new request headers
164 |     # Because "keep-alive" is guaranteed to be in the "connection" value, there is no need to check for emptiness here
165 | new_headers["connection"] = ",".join(client_connection_header)
166 |
167 |     # Remove the "keep-alive" header
168 | if "keep-alive" in new_headers:
169 | del new_headers["keep-alive"]
170 |
171 | return _ConnectionHeaderParseResult(whether_require_close, new_headers)
172 |
173 |
174 | def _change_server_header(
175 | *, headers: httpx.Headers, require_close: bool
176 | ) -> httpx.Headers:
177 | """Change server response headers for sending to client.
178 |
179 | - If require_close is True, will make sure "connection: close" in headers, else will remove it.
180 | - And remove "keep-alive" header.
181 |
182 | Args:
183 | headers: server response headers
184 | require_close: whether require close connection
185 |
186 | Returns:
187 |         The **original headers**, which **have been changed in place**.
188 | """
189 | server_connection_header: list[str] = [
190 | v.strip() for v in headers.get("connection", "").lower().split(",")
191 | ]
192 |
193 |     # Remove or add the "connection: close" value
194 | if require_close:
195 | if "close" not in server_connection_header:
196 | server_connection_header.insert(0, "close")
197 | else:
198 | if "close" in server_connection_header:
199 | server_connection_header.remove("close")
200 |     # Write the new connection value back into the headers; remove the header if the value is empty
201 | if server_connection_header:
202 | headers["connection"] = ",".join(server_connection_header)
203 | else:
204 | if "connection" in headers:
205 | del headers["connection"]
206 |
207 |     # Remove the "keep-alive" header
208 | if "keep-alive" in headers:
209 | del headers["keep-alive"]
210 |
211 | return headers
212 |
213 |
214 | #################### # ####################
215 |
216 |
217 | class BaseHttpProxy(BaseProxyModel):
218 | """Http proxy base class.
219 |
220 | Attributes:
221 | client: The `httpx.AsyncClient` to send http requests.
222 |         follow_redirects: Whether to follow redirects of the target server.
223 | """
224 |
225 | @override
226 | async def send_request_to_target( # pyright: ignore [reportIncompatibleMethodOverride]
227 | self, *, request: StarletteRequest, target_url: httpx.URL
228 | ) -> StarletteResponse:
229 | """Change request headers and send request to target url.
230 |
231 | - The http version of request must be in [`SUPPORTED_HTTP_VERSIONS`][fastapi_proxy_lib.core.http.SUPPORTED_HTTP_VERSIONS].
232 |
233 | Args:
234 | request: the original client request.
235 | target_url: target url that request will be sent to.
236 |
237 | Returns:
238 | The response from target url.
239 | """
240 | client = self.client
241 | follow_redirects = self.follow_redirects
242 |
243 | check_result = check_http_version(request.scope, SUPPORTED_HTTP_VERSIONS)
244 | if check_result is not None:
245 | return check_result
246 |
247 |         # Change the host header to the host of the target url;
248 |         # also forcibly remove the "keep-alive" header and add a "keep-alive" value to the "connection" header to keep the connection alive
249 | require_close, proxy_header = _change_client_header(
250 | headers=request.headers, target_url=target_url
251 | )
252 |
253 |         # Some methods should not carry a request body
254 | request_content = (
255 | None if request.method in _NON_REQUEST_BODY_METHODS else request.stream()
256 | )
257 |
258 | # FIX: https://github.com/WSH032/fastapi-proxy-lib/security/advisories/GHSA-7vwr-g6pm-9hc8
259 | # time cost: 396 ns ± 3.39 ns
260 |         # Since this is not an atomic operation, it does not guarantee that cookie leakage is prevented.
261 |         # The guaranteed fix is to forcibly set a highest-priority cookie header via `_tool.change_necessary_client_header_for_httpx`
262 | client.cookies.clear()
263 |
264 |         # NOTE: do not catch exceptions of `client.build_request` and `client.send` here, because generally
265 |         # - reverse proxy exceptions should report 5xx errors
266 |         # - while forward proxy exceptions should report 4xx errors
267 | proxy_request = client.build_request(
268 | method=request.method,
269 | url=target_url,
270 |             # TODO, FIXME: do not shallow clone (i.e., `tuple(...)`) the query_params,
271 | # see:
272 | params=tuple(request.query_params.multi_items()),
273 | headers=proxy_header,
274 |             content=request_content,  # FIXME: a known issue: streaming request headers contain 'transfer-encoding': 'chunked', but some servers reject that header with a 400
275 |             # cookies=request.cookies,  # NOTE: cookies already present in the headers take priority, so this is not needed
276 | )
277 |
278 |         # DEBUG: record for debugging
279 | _logger.debug(
280 | "HTTP: client:%s ; url:%s ; head:%s",
281 | request.client,
282 | proxy_request.url,
283 | proxy_request.headers,
284 | )
285 |
286 | proxy_response = await client.send(
287 | proxy_request,
288 | stream=True,
289 | follow_redirects=follow_redirects,
290 | )
291 |
292 | tasks = BackgroundTasks()
293 |         tasks.add_task(proxy_response.aclose)  # add a background task to close the response after it has been sent
294 |
295 |         # Based on the earlier client request, decide whether to add a "connection: close" header to the response headers to close the connection
296 |         # https://www.uvicorn.org/server-behavior/#http-headers
297 |         # If the response headers contain "connection: close", uvicorn will close the connection automatically
298 | proxy_response_headers = _change_server_header(
299 | headers=proxy_response.headers, require_close=require_close
300 | )
301 | return StreamingResponse(
302 | content=proxy_response.aiter_raw(),
303 | status_code=proxy_response.status_code,
304 | headers=proxy_response_headers,
305 | background=tasks,
306 | )
307 |
308 | @override
309 | async def proxy(*_: Any, **__: Any) -> NoReturn:
310 | """NotImplemented."""
311 | raise NotImplementedError()
312 |
313 |
314 | class ReverseHttpProxy(BaseHttpProxy):
315 | '''Reverse http proxy.
316 |
317 | Attributes:
318 | client: The [`httpx.AsyncClient`](https://www.python-httpx.org/api/#asyncclient) to send http requests.
319 |         follow_redirects: Whether to follow redirects of the target server.
320 | base_url: The target server url.
321 |
322 | # # Examples
323 |
324 | ```python
325 | from contextlib import asynccontextmanager
326 | from typing import AsyncIterator
327 |
328 | from fastapi import FastAPI
329 | from fastapi_proxy_lib.core.http import ReverseHttpProxy
330 | from httpx import AsyncClient
331 | from starlette.requests import Request
332 |
333 | proxy = ReverseHttpProxy(AsyncClient(), base_url="http://www.example.com/")
334 |
335 | @asynccontextmanager
336 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]: # (1)!
337 | """Close proxy."""
338 | yield
339 | await proxy.aclose()
340 |
341 | app = FastAPI(lifespan=close_proxy_event)
342 |
343 | @app.get("/{path:path}") # (2)!
344 | async def _(request: Request, path: str = ""):
345 | return await proxy.proxy(request=request, path=path) # (3)!
346 |
347 |     # Then run shell: `uvicorn <your_py_file>:app --host 127.0.0.1 --port 8000`
348 | # visit the app: `http://127.0.0.1:8000/`
349 | # you will get the response from `http://www.example.com/`
350 | ```
351 |
352 |     1. For lifespan, please refer to [starlette/lifespan](https://www.starlette.io/lifespan/)
353 |     2. `{path:path}` is the key.
354 |         It allows the app to accept all path parameters.
355 |         See the Starlette routing documentation for more info.
356 | 3. !!! info
357 | In fact, you only need to pass the `request: Request` argument.
358 | `fastapi_proxy_lib` can automatically get the `path` from `request`.
359 | Explicitly pointing it out here is just to remind you not to forget to specify `{path:path}`.
360 | '''
361 |
362 | client: httpx.AsyncClient
363 | follow_redirects: bool
364 | base_url: httpx.URL
365 |
366 | @override
367 | def __init__(
368 | self,
369 | client: Optional[httpx.AsyncClient] = None,
370 | *,
371 | base_url: Union[httpx.URL, str],
372 | follow_redirects: bool = False,
373 | ) -> None:
374 | """Reverse http proxy.
375 |
376 | Note: please make sure `base_url` is available.
377 | Because when an error occurs,
378 |             we cannot distinguish whether it is a proxy server network error, or an error of `base_url`.
379 |             So, we will return a 502 status code whatever the error is.
380 |
381 | Args:
382 | client: The `httpx.AsyncClient` to send http requests. Defaults to None.
383 | If None, will create a new `httpx.AsyncClient`,
384 | else will use the given `httpx.AsyncClient`.
385 |             follow_redirects: Whether to follow redirects of the target server. Defaults to False.
386 | base_url: The target proxy server url.
387 | """
388 | self.base_url = check_base_url(base_url)
389 | super().__init__(client, follow_redirects=follow_redirects)
390 |
391 | @override
392 | async def proxy( # pyright: ignore [reportIncompatibleMethodOverride]
393 | self, *, request: StarletteRequest, path: Optional[str] = None
394 | ) -> StarletteResponse:
395 | """Send request to target server.
396 |
397 | Args:
398 | request: `starlette.requests.Request`
399 | path: The path params of request, which means the path params of base url.
400 | If None, will get it from `request.path_params`.
401 | **Usually, you don't need to pass this argument**.
402 |
403 | Returns:
404 | The response from target server.
405 | """
406 | base_url = self.base_url
407 |
408 |         # Take only the first path param. Note that we allow having no path param at all, which means requesting the base url directly
409 | path_param: str = (
410 | path if path is not None else next(iter(request.path_params.values()), "")
411 | )
412 |
413 |         # Concatenate the path param onto the target url
414 |         # e.g: "https://www.example.com/p0/" + "p1"
415 |         # NOTE: path_param here carries no query params, and is allowed to start with "/" (resulting in /p0//p1)
416 | target_url = base_url.copy_with(
417 | path=(base_url.path + path_param)
418 |         ) # time cost: 18.4 µs ± 262 ns
419 |
420 | try:
421 | return await self.send_request_to_target(
422 | request=request, target_url=target_url
423 | )
424 | except _502_ERROR_NEED_TO_BE_CATCHED_IN_REVERSE_PROXY as e:
425 |             # Please note: the reverse proxy server (i.e. this instance) is obliged to guarantee that,
426 |             # whatever path param the client sends, the network connection between the proxy server and the upstream server is always available.
427 |             # Therefore any error here is treated as an internal error of the proxy server (i.e. this instance), and 502 is returned.
428 | msg = dedent(
429 | f"""\
430 | Error in ReverseHttpProxy().proxy():
431 | url: {target_url}
432 | request headers: {request.headers}
433 | """
434 |             ) # It's better not to read request.body, because it may be very large, e.g. in a file-upload POST request
435 |
436 | return return_err_msg_response(
437 | _ReverseProxyServerError(
438 | "Oops! Something wrong! Please contact the server maintainer!"
439 | ),
440 | status_code=starlette_status.HTTP_502_BAD_GATEWAY,
441 | logger=_logger.exception,
442 | _msg=msg,
443 | _exc_info=e,
444 | )
445 |         # NOTE: for a reverse proxy server, we do not return "any" error message to the client, because it may leak internal server information
446 |
447 |
448 | class ForwardHttpProxy(BaseHttpProxy):
449 | '''Forward http proxy.
450 |
451 | Attributes:
452 | client: The [`httpx.AsyncClient`](https://www.python-httpx.org/api/#asyncclient) to send http requests.
453 |         follow_redirects: Whether to follow redirects of the target server.
454 |         proxy_filter: Callable filter that decides whether to reject proxy requests.
455 |
456 | # # Examples
457 |
458 | ```python
459 | from contextlib import asynccontextmanager
460 | from typing import AsyncIterator
461 |
462 | from fastapi import FastAPI
463 | from fastapi_proxy_lib.core.http import ForwardHttpProxy
464 | from fastapi_proxy_lib.core.tool import default_proxy_filter
465 | from httpx import AsyncClient
466 | from starlette.requests import Request
467 |
468 | proxy = ForwardHttpProxy(AsyncClient(), proxy_filter=default_proxy_filter)
469 |
470 | @asynccontextmanager
471 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]:
472 | """Close proxy."""
473 | yield
474 | await proxy.aclose()
475 |
476 | app = FastAPI(lifespan=close_proxy_event)
477 |
478 | @app.get("/{path:path}")
479 | async def _(request: Request, path: str = ""):
480 | return await proxy.proxy(request=request, path=path)
481 |
482 |     # Then run shell: `uvicorn <module>:app --host 127.0.0.1 --port 8000`
483 | # visit the app: `http://127.0.0.1:8000/http://www.example.com`
484 | # you will get the response from `http://www.example.com`
485 | ```
486 | '''
487 |
488 | client: httpx.AsyncClient
489 | follow_redirects: bool
490 | proxy_filter: ProxyFilterProto
491 |
492 | @override
493 | def __init__(
494 | self,
495 | client: Optional[httpx.AsyncClient] = None,
496 | *,
497 | follow_redirects: bool = False,
498 | proxy_filter: Optional[ProxyFilterProto] = None,
499 | ) -> None:
500 | """Forward http proxy.
501 |
502 | Args:
503 | client: The `httpx.AsyncClient` to send http requests. Defaults to None.
504 | If None, will create a new `httpx.AsyncClient`,
505 | else will use the given `httpx.AsyncClient`.
506 |             follow_redirects: Whether to follow redirects of the target server. Defaults to False.
507 |             proxy_filter: Callable filter that decides whether to reject proxy requests.
508 | If None, will use the default filter.
509 | """
510 |         # TODO: the explicit warning here is intentional for now; it will be removed later
511 | self.proxy_filter = warn_for_none_filter(proxy_filter)
512 | super().__init__(client, follow_redirects=follow_redirects)
513 |
514 | @override
515 | async def proxy( # pyright: ignore [reportIncompatibleMethodOverride]
516 | self,
517 | *,
518 | request: StarletteRequest,
519 | path: Optional[str] = None,
520 | ) -> StarletteResponse:
521 | """Send request to target server.
522 |
523 | Args:
524 | request: `starlette.requests.Request`
525 | path: The path params of request, which means the full url of target server.
526 | If None, will get it from `request.path_params`.
527 | **Usually, you don't need to pass this argument**.
528 |
529 | Returns:
530 | The response from target server.
531 | """
532 | proxy_filter = self.proxy_filter
533 |
534 |         # Take only the first path param
535 | path_param: str = (
536 | next(iter(request.path_params.values()), "") if path is None else path
537 | )
538 |         # If there is no path param, i.e. no target url was specified for the forward proxy, return 400
539 | if path_param == "":
540 | error = _BadTargetUrlError("Must provide target url.")
541 | return return_err_msg_response(
542 | error, status_code=starlette_status.HTTP_400_BAD_REQUEST
543 | )
544 |
545 |         # Try to parse the path param as a url
546 | try:
547 |             # NOTE: in a forward proxy, the path param is the target url.
548 |             # TODO: each URL instantiation costs 16.2 µs; consider whether to optimize with lru_cache
549 | target_url = httpx.URL(path_param)
550 | except httpx.InvalidURL as e: # pragma: no cover
551 |             # This error should never be raised, because the received path_param has been validated
552 |             # But a browser ignoring the maximum url length limit could send an over-long url and trigger InvalidURL
553 |             # So we log this critical error here, as a reminder to remove `pragma: no cover`
554 | return return_err_msg_response(
555 | e,
556 | status_code=starlette_status.HTTP_400_BAD_REQUEST,
557 | logger=_logger.critical,
558 | )
559 |
560 |         # Apply the request filter
561 | filter_result = proxy_filter(target_url)
562 | if filter_result is not None:
563 | return return_err_msg_response(
564 | _RejectedProxyRequestError(filter_result),
565 | status_code=starlette_status.HTTP_403_FORBIDDEN,
566 | )
567 |
568 | try:
569 | return await self.send_request_to_target(
570 | request=request, target_url=target_url
571 | )
572 |         # We need to check whether the url sent by the client is valid, including a missing scheme; if not, a _400 error is raised
573 | except _400_ERROR_NEED_TO_BE_CATCHED_IN_FORWARD_PROXY as e:
574 | return return_err_msg_response(
575 | e, status_code=starlette_status.HTTP_400_BAD_REQUEST
576 | )
577 | except _500_ERROR_NEED_TO_BE_CATCHED_IN_FORWARD_PROXY as e:
578 |             # 5xx errors need to be logged
579 | return return_err_msg_response(
580 | e,
581 | status_code=starlette_status.HTTP_500_INTERNAL_SERVER_ERROR,
582 | logger=_logger.exception,
583 | _exc_info=e,
584 | )
585 |         # Please note: we do not return other errors to the client, because it may leak internal server information
586 |
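Editor's note: a minimal sketch (not part of the source file) of how the two proxies above resolve a client path into a target URL, following the commented logic in their `proxy()` methods; the URLs and variable names are illustrative only.

```python
import httpx

# ReverseHttpProxy: the path param is appended to `base_url.path`
# (see the comments around lines 408-418 above).
base_url = httpx.URL("http://www.example.com/p0/")
path_param = "p1"  # no query string; a leading "/" would yield ".../p0//p1"
target_url = base_url.copy_with(path=base_url.path + path_param)
assert str(target_url) == "http://www.example.com/p0/p1"

# ForwardHttpProxy: the path param *is* the full target url
# (see the comments around lines 534-549 above).
forward_target = httpx.URL("http://www.example.com/anything?q=1")
```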
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/core/tool.py:
--------------------------------------------------------------------------------
1 | """User-oriented tool library."""
2 |
3 | from ._tool import ProxyFilterProto, default_proxy_filter
4 |
5 | __all__ = ("ProxyFilterProto", "default_proxy_filter")
6 |
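Editor's note: a minimal sketch of a custom filter compatible with `ProxyFilterProto` as it is called in `ForwardHttpProxy.proxy()` above (return `None` to allow the request, or a string reason to reject it with a 403); the host check below is illustrative only.

```python
from typing import Optional

import httpx


def allow_only_example_com(url: httpx.URL) -> Optional[str]:
    """Allow only targets under example.com; reject everything else."""
    if url.host == "www.example.com" or url.host.endswith(".example.com"):
        return None  # None means the request is allowed
    return f"host {url.host!r} is not allowed"  # a string reason means 403
```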
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/fastapi/__init__.py:
--------------------------------------------------------------------------------
1 | """Module for [`fastapi`](https://fastapi.tiangolo.com/).
2 |
3 | User-oriented helper functions.
4 | """
5 |
6 | # NOTE: All user-oriented non-private functions (including local functions) must have documentation.
7 |
8 | from importlib.util import find_spec
9 | from textwrap import dedent
10 |
11 | if find_spec("fastapi") is None:  # pragma: no cover # cannot be tested
12 | msg: str = dedent(
13 | """\
14 | `fastapi` is not installed.
15 | `fastapi_proxy_lib.fastapi` module requires installing `fastapi` first:
16 | pip install fastapi-proxy-lib[standard]
17 | """
18 | )
19 | raise RuntimeError(msg)
20 |
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/fastapi/app.py:
--------------------------------------------------------------------------------
1 | """Utils for getting a fastapi proxy app.
2 |
3 | The high-level API for [fastapi_proxy_lib.fastapi.router][].
4 | """
5 |
6 | from typing import Optional, Union
7 |
8 | import httpx
9 | from fastapi import FastAPI
10 |
11 | from fastapi_proxy_lib.core._tool import ProxyFilterProto
12 | from fastapi_proxy_lib.core.http import ForwardHttpProxy, ReverseHttpProxy
13 | from fastapi_proxy_lib.core.websocket import (
14 | DEFAULT_KEEPALIVE_PING_INTERVAL_SECONDS,
15 | DEFAULT_KEEPALIVE_PING_TIMEOUT_SECONDS,
16 | DEFAULT_MAX_MESSAGE_SIZE_BYTES,
17 | DEFAULT_QUEUE_SIZE,
18 | ReverseWebSocketProxy,
19 | )
20 |
21 | from .router import (
22 | RouterHelper,
23 |     _HttpProxyTypes,  # pyright: ignore [reportPrivateUsage] # allow using this project's internal private members
24 |     _WebSocketProxyTypes,  # pyright: ignore [reportPrivateUsage] # allow using this project's internal private members
25 | )
26 |
27 | __all__ = (
28 | "forward_http_app",
29 | "reverse_http_app",
30 | "reverse_ws_app",
31 | )
32 |
33 |
34 | def _proxy2app(proxy: Union[_HttpProxyTypes, _WebSocketProxyTypes]) -> FastAPI:
35 | """Util function to register proxy to FastAPI app."""
36 |     # Note: a new RouterHelper must be instantiated each time; otherwise, sharing a RouterHelper would cause the clients of other apps to be closed
37 | helper = RouterHelper()
38 |
39 | router = helper.register_router(proxy)
40 |
41 | app = FastAPI(lifespan=helper.get_lifespan())
42 | app.include_router(router)
43 |
44 | return app
45 |
46 |
47 | def forward_http_app(
48 | client: Optional[httpx.AsyncClient] = None,
49 | *,
50 | follow_redirects: bool = False,
51 | proxy_filter: Optional[ProxyFilterProto] = None,
52 | ) -> FastAPI:
53 | """Fastapi app factory for forward http proxy.
54 |
55 | Examples:
56 | The same as [`ForwardHttpProxy.__init__`][fastapi_proxy_lib.core.http.ForwardHttpProxy.__init__].
57 |
58 | Args:
59 | client: refer to [`ForwardHttpProxy`][fastapi_proxy_lib.core.http.ForwardHttpProxy].
60 | follow_redirects: refer to [`ForwardHttpProxy`][fastapi_proxy_lib.core.http.ForwardHttpProxy].
61 | proxy_filter: refer to [`ForwardHttpProxy`][fastapi_proxy_lib.core.http.ForwardHttpProxy].
62 | """
63 | forward_http_proxy = ForwardHttpProxy(
64 | client, proxy_filter=proxy_filter, follow_redirects=follow_redirects
65 | )
66 |
67 | return _proxy2app(forward_http_proxy)
68 |
69 |
70 | def reverse_http_app(
71 | client: Optional[httpx.AsyncClient] = None,
72 | *,
73 | base_url: Union[httpx.URL, str],
74 | follow_redirects: bool = False,
75 | ) -> FastAPI:
76 | """Fastapi app factory for reverse http proxy.
77 |
78 | Examples:
79 | The same as [`ReverseHttpProxy.__init__`][fastapi_proxy_lib.core.http.ReverseHttpProxy.__init__].
80 |
81 | Args:
82 | client: refer to [`ReverseHttpProxy`][fastapi_proxy_lib.core.http.ReverseHttpProxy].
83 | base_url: refer to [`ReverseHttpProxy`][fastapi_proxy_lib.core.http.ReverseHttpProxy].
84 | follow_redirects: refer to [`ReverseHttpProxy`][fastapi_proxy_lib.core.http.ReverseHttpProxy].
85 | """
86 | reverse_http_proxy = ReverseHttpProxy(
87 | client,
88 | base_url=base_url,
89 | follow_redirects=follow_redirects,
90 | )
91 |
92 | return _proxy2app(reverse_http_proxy)
93 |
94 |
95 | def reverse_ws_app(
96 | client: Optional[httpx.AsyncClient] = None,
97 | *,
98 | base_url: Union[httpx.URL, str],
99 | follow_redirects: bool = False,
100 | max_message_size_bytes: int = DEFAULT_MAX_MESSAGE_SIZE_BYTES,
101 | queue_size: int = DEFAULT_QUEUE_SIZE,
102 | keepalive_ping_interval_seconds: Union[
103 | float, None
104 | ] = DEFAULT_KEEPALIVE_PING_INTERVAL_SECONDS,
105 | keepalive_ping_timeout_seconds: Union[
106 | float, None
107 | ] = DEFAULT_KEEPALIVE_PING_TIMEOUT_SECONDS,
108 | ) -> FastAPI:
109 | """Fastapi app factory for reverse ws proxy.
110 |
111 | Examples:
112 | The same as [`ReverseWebSocketProxy.__init__`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy.__init__].
113 |
114 | Args:
115 | client: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
116 | base_url: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
117 | follow_redirects: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
118 | max_message_size_bytes: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
119 | queue_size: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
120 | keepalive_ping_interval_seconds: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
121 | keepalive_ping_timeout_seconds: refer to [`ReverseWebSocketProxy`][fastapi_proxy_lib.core.websocket.ReverseWebSocketProxy].
122 | """
123 | reverse_websocket_proxy = ReverseWebSocketProxy(
124 | client,
125 | base_url=base_url,
126 | follow_redirects=follow_redirects,
127 | max_message_size_bytes=max_message_size_bytes,
128 | queue_size=queue_size,
129 | keepalive_ping_interval_seconds=keepalive_ping_interval_seconds,
130 | keepalive_ping_timeout_seconds=keepalive_ping_timeout_seconds,
131 | )
132 |
133 | return _proxy2app(reverse_websocket_proxy)
134 |
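Editor's note: a minimal sketch of serving one of the factory apps above; `uvicorn.run`, the host/port and the target URL are illustrative only.

```python
import uvicorn

from fastapi_proxy_lib.fastapi.app import reverse_http_app

# The factory returns a ready-to-serve FastAPI app with the proxy mounted on "/"
# and the lifespan (client shutdown) already wired up via RouterHelper.
app = reverse_http_app(base_url="http://www.example.com/")

if __name__ == "__main__":
    uvicorn.run(app, host="127.0.0.1", port=8000)
```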
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/fastapi/router.py:
--------------------------------------------------------------------------------
1 | """Utils for registering proxy to fastapi router.
2 |
3 | The low-level API for [fastapi_proxy_lib.fastapi.app][].
4 | """
5 |
6 | import asyncio
7 | import warnings
8 | from collections.abc import AsyncIterator
9 | from contextlib import AbstractAsyncContextManager, asynccontextmanager
10 | from typing import (
11 | Any,
12 | Callable,
13 | Literal,
14 | Optional,
15 | TypeVar,
16 | Union,
17 | )
18 |
19 | from fastapi import APIRouter
20 | from starlette.requests import Request
21 | from starlette.responses import Response
22 | from starlette.websockets import WebSocket
23 | from typing_extensions import overload
24 |
25 | from fastapi_proxy_lib.core.http import ForwardHttpProxy, ReverseHttpProxy
26 | from fastapi_proxy_lib.core.websocket import ReverseWebSocketProxy
27 |
28 | __all__ = ("RouterHelper",)
29 |
30 |
31 | _HttpProxyTypes = Union[ForwardHttpProxy, ReverseHttpProxy]
32 | _WebSocketProxyTypes = ReverseWebSocketProxy
33 |
34 |
35 | _APIRouterTypes = TypeVar("_APIRouterTypes", bound=APIRouter)
36 |
37 |
38 | def _http_register_router(
39 | proxy: _HttpProxyTypes,
40 | router: APIRouter,
41 | **kwargs: Any,
42 | ) -> None:
43 | """Bind http proxy to router.
44 |
45 | Args:
46 | proxy: http proxy to bind.
47 | router: fastapi router to bind.
48 | **kwargs: The kwargs to pass to router endpoint(e.g `@router.get()`).
49 |
50 | Returns:
51 | None. Just do binding proxy to router.
52 | """
53 | kwargs.pop("path", None)
54 |
55 | @router.get("/{path:path}", **kwargs)
56 | @router.post("/{path:path}", **kwargs)
57 | @router.put("/{path:path}", **kwargs)
58 | @router.delete("/{path:path}", **kwargs)
59 | @router.options("/{path:path}", **kwargs)
60 | @router.head("/{path:path}", **kwargs)
61 | @router.patch("/{path:path}", **kwargs)
62 | @router.trace("/{path:path}", **kwargs)
63 | async def http_proxy( # pyright: ignore[reportUnusedFunction]
64 | request: Request, path: str = ""
65 | ) -> Response:
66 | """HTTP proxy endpoint.
67 |
68 | Args:
69 | request: The original request from client.
70 | path: The path parameters of request.
71 |
72 | Returns:
73 | The response from target server.
74 | """
75 | return await proxy.proxy(request=request, path=path)
76 |
77 |
78 | def _ws_register_router(
79 | proxy: _WebSocketProxyTypes,
80 | router: APIRouter,
81 | **kwargs: Any,
82 | ) -> None:
83 | """Bind websocket proxy to router.
84 |
85 | Args:
86 | proxy: websocket proxy to bind.
87 | router: fastapi router to bind.
88 | **kwargs: The kwargs to pass to router endpoint(e.g `@router.websocket()`).
89 |
90 | Returns:
91 | None. Just do binding proxy to router.
92 | """
93 | kwargs.pop("path", None)
94 |
95 | @router.websocket("/{path:path}", **kwargs)
96 | async def ws_proxy( # pyright: ignore[reportUnusedFunction]
97 | websocket: WebSocket, path: str = ""
98 | ) -> Union[Response, Literal[False]]:
99 | """WebSocket proxy endpoint.
100 |
101 | Args:
102 | websocket: The original websocket request from client.
103 | path: The path parameters of request.
104 |
105 | Returns:
106 |             If the websocket connection fails to be established:
107 | - Will call `websocket.close()` to send code `4xx`
108 | - Then return a `StarletteResponse` from target server
109 |             If the websocket connection is established successfully:
110 | - Will run forever until the connection is closed. Then return False.
111 | """
112 | return await proxy.proxy(websocket=websocket, path=path)
113 |
114 |
115 | class RouterHelper:
116 | """Helper class to register proxy to fastapi router.
117 |
118 | # # Examples
119 |
120 | ```python
121 | from fastapi import APIRouter, FastAPI
122 | from fastapi_proxy_lib.core.http import ForwardHttpProxy, ReverseHttpProxy
123 | from fastapi_proxy_lib.core.tool import default_proxy_filter
124 | from fastapi_proxy_lib.core.websocket import ReverseWebSocketProxy
125 | from fastapi_proxy_lib.fastapi.router import RouterHelper
126 |
127 | reverse_http_proxy = ReverseHttpProxy(base_url="http://www.example.com/")
128 | reverse_ws_proxy = ReverseWebSocketProxy(base_url="ws://echo.websocket.events/")
129 | forward_http_proxy = ForwardHttpProxy(proxy_filter=default_proxy_filter)
130 |
131 | helper = RouterHelper()
132 |
133 | reverse_http_router = helper.register_router(
134 | reverse_http_proxy,
135 | APIRouter(prefix="/reverse"), # (1)!
136 | )
137 | forward_http_router = helper.register_router(
138 | forward_http_proxy,
139 | APIRouter(prefix="/forward"),
140 | )
141 | reverse_ws_router = helper.register_router(reverse_ws_proxy) # (2)!
142 |
143 | app = FastAPI(lifespan=helper.get_lifespan()) # (3)!
144 |
145 | app.include_router(reverse_http_router, prefix="/http") # (4)!
146 | app.include_router(forward_http_router, prefix="/http")
147 | app.include_router(reverse_ws_router, prefix="/ws")
148 |
149 | # reverse http proxy on "/http/reverse"
150 | # forward http proxy on "/http/forward"
151 | # reverse websocket proxy on "/ws"
152 | ```
153 |
154 | 1. You can pass any arguments to [`APIRouter()`][fastapi.APIRouter] if you want.
155 | 2. Or, with default values, `RouterHelper` will create a new router for you.
156 | 3. Registering a lifespan event to close all proxies is a recommended action.
157 | 4. You can use the proxy router just like a normal `APIRouter`.
158 |
159 | Info:
160 | Technically, [fastapi_proxy_lib.fastapi.app][] does the same thing,
161 | including automatically registering lifespan events.
162 |
163 | Abstract: Compared to using the proxy base-class directly, the advantages of using `RouterHelper` are:
164 | - `RouterHelper` automatically registers all HTTP methods (e.g. `GET`, `POST`, etc.) for you.
165 | It also registers WebSocket endpoint if you pass in a WebSocket proxy class.
166 |         - Conveniently get the `lifespan` for closing all proxies by using `helper.get_lifespan()`.
167 | """
168 |
169 | def __init__(self) -> None:
170 | """Initialize RouterHelper."""
171 | self._registered_proxy: set[Union[_HttpProxyTypes, _WebSocketProxyTypes]] = (
172 | set()
173 | )
174 | self._registered_router_id: set[int] = set()
175 |
176 | @property
177 | def registered_proxy(self) -> set[Union[_HttpProxyTypes, _WebSocketProxyTypes]]:
178 | """The proxy that has been registered."""
179 | return self._registered_proxy
180 |
181 | @overload
182 | def register_router(
183 | self,
184 | proxy: Union[_HttpProxyTypes, _WebSocketProxyTypes],
185 | router: Optional[None] = None,
186 | **endpoint_kwargs: Any,
187 | ) -> APIRouter:
188 | # If router is None, will create a new router.
189 | ...
190 |
191 | @overload
192 | def register_router(
193 | self,
194 | proxy: Union[_HttpProxyTypes, _WebSocketProxyTypes],
195 | router: _APIRouterTypes,
196 | **endpoint_kwargs: Any,
197 | ) -> _APIRouterTypes:
198 | # If router is not None, will use the given router.
199 | ...
200 |
201 | def register_router(
202 | self,
203 | proxy: Union[_HttpProxyTypes, _WebSocketProxyTypes],
204 | router: Optional[APIRouter] = None,
205 | **endpoint_kwargs: Any,
206 | ) -> APIRouter:
207 | """Register proxy to router.
208 |
209 | Args:
210 | proxy: The `http`/`websocket` proxy to register.
211 | router: The fastapi router to register. If None, will create a new router.
212 |                 Usually, you don't need to set this argument, unless you want to pass some arguments to the router.
213 | **Note: the same router can only be registered once**.
214 | **endpoint_kwargs: The kwargs which is passed to router endpoint, e.g:
215 | - [`@router.get(**endpoint_kwargs)`][fastapi.APIRouter.get]
216 | - [`@router.websocket(**endpoint_kwargs)`][fastapi.APIRouter.websocket]
217 |
218 | Raises:
219 |             TypeError: If an unknown type of `proxy` arg is passed.
220 |
221 | Returns:
222 |             A [fastapi router][fastapi.APIRouter] whose proxy endpoint has been registered on the root route: `'/'`.
223 | """
224 | router = APIRouter() if router is None else router
225 |
226 |         # Check whether the given router has already been registered; routers are not hashable, so we can only use their id to tell
227 |         # HACK: if a previously recorded router has been destroyed, a new router may end up with the same id
228 | router_id = id(router)
229 | if id(router) in self._registered_router_id:
230 | msg = (
231 | f"The router {router} (id: {router_id}) has been registered, "
232 | f"\033[33myou should not use it to register again in any case\033[m."
233 | )
234 | warnings.warn(msg, stacklevel=2)
235 | else:
236 | self._registered_router_id.add(router_id)
237 |
238 | if isinstance(proxy, (ForwardHttpProxy, ReverseHttpProxy)):
239 | _http_register_router(proxy, router, **endpoint_kwargs)
240 | elif isinstance(
241 | proxy, ReverseWebSocketProxy
242 | ): # pyright: ignore[reportUnnecessaryIsInstance]
243 | _ws_register_router(proxy, router, **endpoint_kwargs)
244 | else:
245 | msg = (
246 | f"Unknown proxy type: {type(proxy)}, "
247 | f"only support: {_HttpProxyTypes} and {_WebSocketProxyTypes}"
248 | )
249 | raise TypeError(msg)
250 |
251 | self._registered_proxy.add(proxy)
252 | return router
253 |
254 | def get_lifespan(self) -> Callable[..., AbstractAsyncContextManager[None]]:
255 | """The lifespan event for closing registered proxy.
256 |
257 | Returns:
258 | asynccontextmanager for closing registered proxy,
259 | refer to [lifespan](https://fastapi.tiangolo.com/advanced/events/#lifespan)
260 | """
261 |
262 | @asynccontextmanager
263 | async def shutdown_clients(*_: Any, **__: Any) -> AsyncIterator[None]:
264 | """Asynccontextmanager for closing registered proxy.
265 |
266 | Args:
267 | *_: Whatever.
268 | **__: Whatever.
269 |
270 | Returns:
271 |                 When `__aexit__` is called, all registered proxies will be closed.
272 | """
273 | yield
274 | await asyncio.gather(*[proxy.aclose() for proxy in self.registered_proxy])
275 |
276 | return shutdown_clients
277 |
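Editor's note: a minimal sketch of forwarding route options through `**endpoint_kwargs`; they reach the `@router.get()`/`@router.post()`/... decorators shown in `_http_register_router` above. `tags` and `include_in_schema` are ordinary FastAPI route options; the prefix and URL are illustrative only.

```python
from fastapi import APIRouter, FastAPI

from fastapi_proxy_lib.core.http import ReverseHttpProxy
from fastapi_proxy_lib.fastapi.router import RouterHelper

helper = RouterHelper()
proxy = ReverseHttpProxy(base_url="http://www.example.com/")

router = helper.register_router(
    proxy,
    APIRouter(prefix="/example"),
    tags=["proxy"],           # forwarded to @router.get()/@router.post()/...
    include_in_schema=False,  # e.g. hide the catch-all routes from the docs
)

app = FastAPI(lifespan=helper.get_lifespan())
app.include_router(router)
```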
--------------------------------------------------------------------------------
/src/fastapi_proxy_lib/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WSH032/fastapi-proxy-lib/fb53f4af44f9ca03bd67d5a551e5176348398329/src/fastapi_proxy_lib/py.typed
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Tests for the project."""
2 |
--------------------------------------------------------------------------------
/tests/app/echo_http_app.py:
--------------------------------------------------------------------------------
1 | # ruff: noqa: D100, D417
2 | # pyright: reportUnusedFunction=false
3 |
4 | import io
5 | from collections.abc import Mapping
6 | from typing import Literal, Union
7 |
8 | from fastapi import FastAPI, Request, Response
9 | from fastapi.responses import StreamingResponse
10 |
11 | from .tool import AppDataclass4Test, RequestDict
12 |
13 | DEFAULT_FILE_NAME = "echo.txt"
14 |
15 |
16 | def get_app() -> AppDataclass4Test: # noqa: C901
17 | """Get the echo http app.
18 |
19 | Returns:
20 | TestAppDataclass.
21 | """
22 | app = FastAPI()
23 | request_dict = RequestDict(request=None)
24 | test_app_dataclass = AppDataclass4Test(app=app, request_dict=request_dict)
25 |
26 | @app.head("/head/return_keep_alive_headers")
27 | async def return_keep_alive_headers(request: Request) -> Response:
28 | """Http head method endpoint.
29 |
30 | Returns:
31 |             A response whose headers include:
32 | - Connection: keep-alive
33 | - Keep-Alive: timeout=5, max=1000
34 | """
35 | nonlocal test_app_dataclass
36 | test_app_dataclass.request_dict["request"] = request
37 | return Response(
38 | headers={"Connection": "keep-alive", "Keep-Alive": "timeout=5, max=1000"}
39 | )
40 |
41 | @app.head("/head/return_close_connection_headers")
42 | async def return_close_connection_headers(request: Request) -> Response:
43 | """Http head method endpoint.
44 |
45 | Returns:
46 |             A response whose headers contain `{"Connection": "close"}`.
47 | """
48 | nonlocal test_app_dataclass
49 | test_app_dataclass.request_dict["request"] = request
50 | return Response(headers={"Connection": "close"})
51 |
52 | @app.head("/head/return_none_connection_headers")
53 | async def return_none_connection_headers(request: Request) -> Response:
54 | """Http head method endpoint.
55 |
56 | Returns:
57 |             A response whose headers do not contain a `Connection` header.
58 | """
59 | nonlocal test_app_dataclass
60 | test_app_dataclass.request_dict["request"] = request
61 | return Response(headers={})
62 |
63 | @app.get("/get/echo_headers_and_params")
64 | async def echo_headers_and_params(
65 | request: Request,
66 | ) -> Mapping[str, Union[str, Mapping[str, str]]]:
67 | """Http get method endpoint for echo headers, path_params, query_params.
68 |
69 | Returns:
70 | ```py
71 | {
72 | **request.headers,
73 | "path_params": request.path_params,
74 | "query_params": request.query_params,
75 | }
76 | ```
77 | """
78 | nonlocal test_app_dataclass
79 | test_app_dataclass.request_dict["request"] = request
80 | msg = {
81 | **request.headers,
82 | "path_params": request.path_params,
83 | "query_params": request.query_params,
84 | }
85 | return msg
86 |
87 | @app.get("/get/cookies")
88 | async def cookies(
89 | request: Request,
90 | ) -> Mapping[str, str]:
91 | """Returns cookie of request."""
92 | nonlocal test_app_dataclass
93 | test_app_dataclass.request_dict["request"] = request
94 | return request.cookies
95 |
96 | @app.get("/get/cookies/set/{key}/{value}")
97 | async def cookies_set(
98 | request: Request,
99 | key: str,
100 | value: str,
101 | ) -> Response:
102 | """Returns a response which requires client to set cookie: `key=value`."""
103 | nonlocal test_app_dataclass
104 | test_app_dataclass.request_dict["request"] = request
105 | response = Response()
106 | response.set_cookie(key=key, value=value)
107 | return response
108 |
109 | @app.post("/post/echo_body")
110 | async def echo_body(request: Request) -> Response:
111 | """Http post method endpoint for echo body.
112 |
113 | Returns:
114 | A response of which body is the same as request body.
115 | """
116 | nonlocal test_app_dataclass
117 | test_app_dataclass.request_dict["request"] = request
118 | return Response(content=await request.body())
119 |
120 | @app.put("/put/echo_file")
121 | async def echo_file(request: Request, content: str = "") -> StreamingResponse:
122 | """Http put method endpoint for echo file.
123 |
124 | Args:
125 | content: The content of file to echo.
126 |
127 | Returns:
128 |             A file StreamingResponse:
129 |                 - body: a file stream whose content is the `content` query param of `/put/echo_file?content={content}`.
130 | - Headers:
131 | - Media-Type: `text/plain; charset=utf-8`
132 | - Content-Disposition: `attachment; filename=echo.txt`
133 | """
134 | nonlocal test_app_dataclass
135 | test_app_dataclass.request_dict["request"] = request
136 | txt_file_like = io.BytesIO(content.encode("utf-8"))
137 | return StreamingResponse(
138 | txt_file_like,
139 | media_type="text/plain; charset=utf-8",
140 | headers={
141 | "Content-Disposition": f"attachment; filename={DEFAULT_FILE_NAME}"
142 | },
143 | )
144 |
145 | @app.options("/")
146 | async def options_endpoint(request: Request) -> Response:
147 | """Http options method endpoint.
148 |
149 | Returns:
150 |             A response whose headers contain `{"Allow": "GET, POST, PUT, OPTIONS, HEAD, DELETE, PATCH"}`.
151 | """
152 | nonlocal test_app_dataclass
153 | test_app_dataclass.request_dict["request"] = request
154 | return Response(
155 | headers={"Allow": "GET, POST, PUT, OPTIONS, HEAD, DELETE, PATCH"}
156 | )
157 |
158 | @app.get("/")
159 | @app.post("/")
160 | @app.put("/")
161 | @app.head("/")
162 | @app.delete("/")
163 | @app.patch("/")
164 | async def _(request: Request) -> Literal[0]:
165 | nonlocal test_app_dataclass
166 | test_app_dataclass.request_dict["request"] = request
167 | return 0
168 |
169 | return test_app_dataclass
170 |
171 |
172 | # for cmd test
173 | app = get_app().app
174 |
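Editor's note: the module-level `app` above exists for manual testing from the command line (see the `# for cmd test` comment). A hedged example invocation, assuming it is run from the repository root with the package importable:

```python
# From the repository root, something like:
#   uvicorn tests.app.echo_http_app:app --host 127.0.0.1 --port 8000
# then point a proxy under test at http://127.0.0.1:8000/.
```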
--------------------------------------------------------------------------------
/tests/app/echo_ws_app.py:
--------------------------------------------------------------------------------
1 | # ruff: noqa: D100
2 | # pyright: reportUnusedFunction=false
3 |
4 | import asyncio
5 |
6 | from fastapi import FastAPI, WebSocket
7 | from starlette.websockets import WebSocketDisconnect
8 |
9 | from .tool import AppDataclass4Test, RequestDict
10 |
11 |
12 | def get_app() -> AppDataclass4Test: # noqa: C901, PLR0915
13 | """Get the echo ws app.
14 |
15 | Returns:
16 | TestAppDataclass.
17 | """
18 | app = FastAPI()
19 | request_dict = RequestDict(request=None)
20 | test_app_dataclass = AppDataclass4Test(app=app, request_dict=request_dict)
21 |
22 | @app.websocket("/echo_text")
23 | async def echo_text(websocket: WebSocket):
24 | """Websocket endpoint for echo text. Just receive text and send it back.
25 |
26 | Note: client must send text first.
27 | """
28 | nonlocal test_app_dataclass
29 | test_app_dataclass.request_dict["request"] = websocket
30 |
31 | await websocket.accept()
32 | while True:
33 | try:
34 | recev = await websocket.receive_text()
35 | await websocket.send_text(recev)
36 | except WebSocketDisconnect:
37 | break
38 |
39 | @app.websocket("/echo_bytes")
40 | async def echo_bytes(websocket: WebSocket):
41 | """Websocket endpoint for echo bytes. Just receive bytes and send it back.
42 |
43 | Note: client must send bytes first.
44 | """
45 | nonlocal test_app_dataclass
46 | test_app_dataclass.request_dict["request"] = websocket
47 |
48 | await websocket.accept()
49 | while True:
50 | try:
51 | recev = await websocket.receive_bytes()
52 | await websocket.send_bytes(recev)
53 | except WebSocketDisconnect:
54 | break
55 |
56 | @app.websocket("/accept_foo_subprotocol")
57 | async def accept_foo_subprotocol(websocket: WebSocket):
58 |         """When the client sends a subprotocols request, accept it with "foo" if the offered subprotocols contain "foo"."""
59 | nonlocal test_app_dataclass
60 | test_app_dataclass.request_dict["request"] = websocket
61 |
62 | # https://asgi.readthedocs.io/en/latest/specs/www.html#websocket-connection-scope
63 | if "foo" in websocket.scope["subprotocols"]:
64 | accepted_subprotocol = "foo"
65 | else:
66 | accepted_subprotocol = None
67 |
68 | await websocket.accept(subprotocol=accepted_subprotocol)
69 |
70 | await websocket.close()
71 |
72 | @app.websocket("/just_close_with_1001")
73 | async def just_close_with_1001(websocket: WebSocket):
74 | """Just do nothing after `accept`, then close ws with 1001 code."""
75 | nonlocal test_app_dataclass
76 | test_app_dataclass.request_dict["request"] = websocket
77 |
78 | await websocket.accept()
79 | await asyncio.sleep(0.3)
80 | await websocket.close(1001)
81 |
82 | @app.websocket("/reject_handshake")
83 | async def reject_handshake(websocket: WebSocket):
84 | """Will reject ws request by just calling `websocket.close()`."""
85 | nonlocal test_app_dataclass
86 | test_app_dataclass.request_dict["request"] = websocket
87 |
88 | await websocket.close()
89 |
90 | @app.websocket("/do_nothing")
91 | async def do_nothing(websocket: WebSocket):
92 | """Will do nothing except `websocket.accept()`."""
93 | nonlocal test_app_dataclass
94 | test_app_dataclass.request_dict["request"] = websocket
95 |
96 | await websocket.accept()
97 |
98 | return test_app_dataclass
99 |
100 |
101 | # for cmd test
102 | app = get_app().app
103 |
--------------------------------------------------------------------------------
/tests/app/tool.py:
--------------------------------------------------------------------------------
1 | # noqa: D100
2 |
3 | import asyncio
4 | import socket
5 | from dataclasses import dataclass
6 | from typing import Any, Callable, Optional, TypedDict, TypeVar, Union
7 |
8 | import httpx
9 | import uvicorn
10 | from fastapi import FastAPI
11 | from starlette.requests import Request
12 | from starlette.websockets import WebSocket
13 | from typing_extensions import Self, override
14 |
15 | _Decoratable_T = TypeVar("_Decoratable_T", bound=Union[Callable[..., Any], type[Any]])
16 |
17 | ServerRecvRequestsTypes = Union[Request, WebSocket]
18 |
19 |
20 | class RequestDict(TypedDict):
21 | """Request TypedDict."""
22 |
23 | request: Union[ServerRecvRequestsTypes, None]
24 | """The latest original http/websocket request from the client."""
25 |
26 |
27 | @dataclass
28 | class AppDataclass4Test:
29 | """Test app dataclass.
30 |
31 | Attributes:
32 | app: The FastAPI app for test.
33 | request_dict: use `request["request"]` to get the latest original http/websocket request from the client.
34 | """
35 |
36 | app: FastAPI
37 | request_dict: RequestDict
38 |
39 | def get_request(self) -> ServerRecvRequestsTypes:
40 | """Get the latest original http/websocket request from the client.
41 |
42 | equal to self.request_dict["request"].
43 | """
44 | server_recv_request = self.request_dict["request"]
45 | assert server_recv_request is not None, "Please send request first."
46 | return server_recv_request
47 |
48 |
49 | def _no_override_uvicorn_server(_method: _Decoratable_T) -> _Decoratable_T:
50 | """Check if the method is already in `uvicorn.Server`."""
51 | assert not hasattr(
52 | uvicorn.Server, _method.__name__
53 | ), f"Override method of `uvicorn.Server` cls : {_method.__name__}"
54 | return _method
55 |
56 |
57 | class AeixtTimeoutUndefine:
58 |     """Sentinel indicating that `contx_exit_timeout` was not set in `aexit()`."""
59 |
60 |
61 | aexit_timeout_undefine = AeixtTimeoutUndefine()
62 |
63 |
64 | # HACK: cannot inherit from AbstractAsyncContextManager[Self]
65 | # There is currently an issue: even when inheriting AbstractAsyncContextManager, pyright cannot infer the type
66 | # We can only rely on the type annotations of __aenter__ and __aexit__
67 | class UvicornServer(uvicorn.Server):
68 |     """Subclass of `uvicorn.Server` that can be used as an async context manager to launch and shut down automatically.
69 |
70 | Attributes:
71 | contx_server_task: The task of server.
72 | contx_socket: The socket of server.
73 |
74 |     Other attributes are the same as `uvicorn.Server`:
75 |         - config: The config arg that was passed in.
76 | ...
77 | """
78 |
79 | _contx_server_task: Union["asyncio.Task[None]", None]
80 | assert not hasattr(uvicorn.Server, "_contx_server_task")
81 |
82 | _contx_socket: Union[socket.socket, None]
83 | assert not hasattr(uvicorn.Server, "_contx_socket")
84 |
85 | _contx_server_started_event: Union[asyncio.Event, None]
86 | assert not hasattr(uvicorn.Server, "_contx_server_started_event")
87 |
88 | contx_exit_timeout: Union[int, float, None]
89 | assert not hasattr(uvicorn.Server, "contx_exit_timeout")
90 |
91 | @override
92 | def __init__(
93 | self, config: uvicorn.Config, contx_exit_timeout: Union[int, float, None] = None
94 | ) -> None:
95 | """The same as `uvicorn.Server.__init__`."""
96 | super().__init__(config=config)
97 | self._contx_server_task = None
98 | self._contx_socket = None
99 | self._contx_server_started_event = None
100 | self.contx_exit_timeout = contx_exit_timeout
101 |
102 | @override
103 | async def startup(self, sockets: Optional[list[socket.socket]] = None) -> None:
104 | """The same as `uvicorn.Server.startup`."""
105 | super_return = await super().startup(sockets=sockets)
106 | self.contx_server_started_event.set()
107 | return super_return
108 |
109 | @_no_override_uvicorn_server
110 | async def aenter(self) -> Self:
111 | """Launch the server."""
112 |         # Before allocating resources, first check for re-entrancy
113 | if self.contx_server_started_event.is_set():
114 | raise RuntimeError("DO not launch server by __aenter__ again!")
115 |
116 |         # FIXME: this socket is designed to be shareable by multiple servers within the same process, which may cause potential issues
117 | self._contx_socket = self.config.bind_socket()
118 |
119 | self._contx_server_task = asyncio.create_task(
120 | self.serve([self._contx_socket]), name=f"Uvicorn Server Task of {self}"
121 | )
122 |         # In the uvicorn.Server implementation, Server.serve() internally calls Server.startup() to complete startup
123 |         # The overridden self.startup() will call self.contx_server_started_event.set() when it finishes
124 |         await self.contx_server_started_event.wait() # wait until the server has actually started before returning
125 | return self
126 |
127 | @_no_override_uvicorn_server
128 | async def __aenter__(self) -> Self:
129 | """Launch the server.
130 |
131 | The same as `self.aenter()`.
132 | """
133 | return await self.aenter()
134 |
135 | @_no_override_uvicorn_server
136 | async def aexit(
137 | self,
138 | contx_exit_timeout: Union[
139 | int, float, None, AeixtTimeoutUndefine
140 | ] = aexit_timeout_undefine,
141 | ) -> None:
142 | """Shutdown the server."""
143 | contx_server_task = self.contx_server_task
144 | contx_socket = self.contx_socket
145 |
146 | if isinstance(contx_exit_timeout, AeixtTimeoutUndefine):
147 | contx_exit_timeout = self.contx_exit_timeout
148 |
149 |         # In the uvicorn.Server implementation, setting should_exit makes the server task finish
150 | assert hasattr(self, "should_exit")
151 | self.should_exit = True
152 |
153 | try:
154 | await asyncio.wait_for(contx_server_task, timeout=contx_exit_timeout)
155 | except asyncio.TimeoutError:
156 | print(f"{contx_server_task.get_name()} timeout!")
157 | finally:
158 |             # uvicorn.Server actually closes the socket automatically; this is just to be safe
159 | contx_socket.close()
160 |
161 | @_no_override_uvicorn_server
162 | async def __aexit__(self, *_: Any, **__: Any) -> None:
163 | """Shutdown the server.
164 |
165 | The same as `self.aexit()`.
166 | """
167 | return await self.aexit()
168 |
169 | @property
170 | @_no_override_uvicorn_server
171 | def contx_server_started_event(self) -> asyncio.Event:
172 | """The event that indicates the server has started.
173 |
174 |         When the property is first accessed, it will instantiate an `asyncio.Event()` and assign it to
175 |         `self._contx_server_started_event`.
176 |
177 |         Warn: This is an internal implementation detail, do not change the event manually.
178 | - please call the property in `self.aenter()` or `self.startup()` **first**.
179 | - **Never** call it outside of an async event loop first:
180 | https://stackoverflow.com/questions/53724665/using-queues-results-in-asyncio-exception-got-future-future-pending-attached
181 | """
182 | if self._contx_server_started_event is None:
183 | self._contx_server_started_event = asyncio.Event()
184 |
185 | return self._contx_server_started_event
186 |
187 | @property
188 | @_no_override_uvicorn_server
189 | def contx_socket(self) -> socket.socket:
190 | """The socket of server.
191 |
192 | Note: must call `self.__aenter__()` first.
193 | """
194 | if self._contx_socket is None:
195 | raise RuntimeError("Please call `self.__aenter__()` first.")
196 | else:
197 | return self._contx_socket
198 |
199 | @property
200 | @_no_override_uvicorn_server
201 | def contx_server_task(self) -> "asyncio.Task[None]":
202 | """The task of server.
203 |
204 | Note: must call `self.__aenter__()` first.
205 | """
206 | if self._contx_server_task is None:
207 | raise RuntimeError("Please call `self.__aenter__()` first.")
208 | else:
209 | return self._contx_server_task
210 |
211 | @property
212 | @_no_override_uvicorn_server
213 | def contx_socket_getname(self) -> Any:
214 | """Utils for calling self.contx_socket.getsockname().
215 |
216 |         Returns:
217 | refer to: https://docs.python.org/zh-cn/3/library/socket.html#socket-families
218 | """
219 | return self.contx_socket.getsockname()
220 |
221 | @property
222 | @_no_override_uvicorn_server
223 | def contx_socket_url(self) -> httpx.URL:
224 | """If server is tcp socket, return the url of server.
225 |
226 | Note: The path of url is explicitly set to "/".
227 | """
228 | config = self.config
229 | if config.fd is not None or config.uds is not None:
230 | raise RuntimeError("Only support tcp socket.")
231 | host, port = self.contx_socket_getname[:2]
232 | return httpx.URL(
233 | host=host,
234 | port=port,
235 | scheme="https" if config.is_ssl else "http",
236 | path="/",
237 | )
238 |
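Editor's note: a minimal sketch of how the `UvicornServer` context manager above is meant to be driven in a test; the function name is illustrative, and `port=0` lets the OS pick a free port.

```python
import uvicorn
from fastapi import FastAPI


async def example() -> None:
    app = FastAPI()
    config = uvicorn.Config(app, host="127.0.0.1", port=0)
    # `UvicornServer` is defined above; entering the context launches the server
    # task and waits until startup has actually completed.
    async with UvicornServer(config, contx_exit_timeout=5) as server:
        base_url = server.contx_socket_url  # e.g. http://127.0.0.1:54321/
        ...  # drive requests against `base_url` here
    # leaving the context sets `should_exit` and awaits the server task
```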
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # ruff: noqa: ANN201, ANN001
2 |
3 | # pyright: reportMissingParameterType=false
4 | # Annotating return types is too tedious; let pyright infer them itself
5 |
6 | """Pytest fixtures for the tests."""
7 |
8 | # https://anyio.readthedocs.io/en/stable/testing.html
9 |
10 | import typing
11 | from collections.abc import AsyncIterator, Coroutine
12 | from contextlib import AsyncExitStack
13 | from dataclasses import dataclass
14 | from typing import (
15 | Callable,
16 | Literal,
17 | Protocol,
18 | Union,
19 | )
20 |
21 | import pytest
22 | import uvicorn
23 | from asgi_lifespan import LifespanManager
24 | from typing_extensions import ParamSpec
25 |
26 | from fastapi_proxy_lib.fastapi.app import (
27 | forward_http_app,
28 | reverse_http_app,
29 | reverse_ws_app,
30 | )
31 |
32 | from .app.echo_http_app import get_app as get_http_test_app
33 | from .app.echo_ws_app import get_app as get_ws_test_app
34 | from .app.tool import AppDataclass4Test, UvicornServer
35 |
36 | # ASGI types.
37 | # Copied from: https://github.com/florimondmanca/asgi-lifespan/blob/fbb0f440337314be97acaae1a3c0c7a2ec8298dd/src/asgi_lifespan/_types.py
38 | Scope = typing.MutableMapping[str, typing.Any]
39 | Message = typing.MutableMapping[str, typing.Any]
40 | Receive = typing.Callable[[], typing.Awaitable[Message]]
41 | Send = typing.Callable[[Message], typing.Awaitable[None]]
42 | ASGIApp = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]]
43 |
44 |
45 | _P = ParamSpec("_P")
46 |
47 |
48 | @dataclass
49 | class LifeAppDataclass4Test(AppDataclass4Test):
50 | """Test app with lifespan dataclass.
51 |
52 | Attributes:
53 | app: The asgi app for test.
54 | request_dict: use `request["request"]` to get the latest original http/websocket request from the client.
55 | """
56 |
57 | app: ASGIApp # pyright: ignore[reportIncompatibleVariableOverride]
58 |
59 |
60 | LifespanManagerFixture = typing.Callable[[ASGIApp], Coroutine[None, None, ASGIApp]]
61 | AppFactoryFixture = Callable[..., Coroutine[None, None, ASGIApp]]
62 | """The lifespan of app will be managed automatically by pytest."""
63 |
64 |
65 | class UvicornServerFixture(Protocol): # noqa: D101
66 | def __call__( # noqa: D102
67 | self, config: uvicorn.Config, contx_exit_timeout: Union[int, float, None] = None
68 | ) -> Coroutine[None, None, UvicornServer]: ...
69 |
70 |
71 | # https://anyio.readthedocs.io/en/stable/testing.html#specifying-the-backends-to-run-on
72 | @pytest.fixture
73 | def anyio_backend() -> Literal["asyncio"]:
74 | """Specify the async backend for `pytest.mark.anyio`."""
75 | return "asyncio"
76 |
77 |
78 | @pytest.fixture
79 | async def lifespan_manager() -> AsyncIterator[LifespanManagerFixture]:
80 | """Fixture for asgi lifespan manager.
81 |
82 | Returns:
83 | _lifespan_manager: (LifespanManagerFixture)
84 | """
85 | async with AsyncExitStack() as exit_stack:
86 |
87 | async def _lifespan_manager(app: ASGIApp) -> ASGIApp:
88 | """Manage lifespan event for app.
89 |
90 | Args:
91 | app: The app of which lifespan event need to be managed.
92 |
93 | Returns:
94 | ASGIApp: The app with lifespan event managed.
95 | """
96 | nonlocal exit_stack
97 | manager = await exit_stack.enter_async_context(LifespanManager(app))
98 | return manager.app
99 |
100 | yield _lifespan_manager
101 |
102 |
103 | # TestAppDataclass is designed so that TestAppDataclass.request only stores the most recent request
104 | # So every fixture here is explicitly required to have "function" scope; do not share TestAppDataclass
105 |
106 |
107 | @pytest.fixture
108 | async def echo_http_test_model(
109 | lifespan_manager: LifespanManagerFixture,
110 | ) -> LifeAppDataclass4Test:
111 | """Echo http app for test.
112 |
113 | Returns:
114 | LifeAppDataclass4Test: refer to `test.app.echo_http_app.get_app()`.
115 | LifeAppDataclass4Test.app: The echo http app for test
116 | def LifeAppDataclass4Test.request(): Get the latest original http request from the client
117 | """
118 | app_dataclass = get_http_test_app()
119 | life_app = await lifespan_manager(app_dataclass.app)
120 | return LifeAppDataclass4Test(app=life_app, request_dict=app_dataclass.request_dict)
121 |
122 |
123 | @pytest.fixture
124 | async def echo_ws_test_model(
125 | lifespan_manager: LifespanManagerFixture,
126 | ) -> LifeAppDataclass4Test:
127 | """Echo ws app for test.
128 |
129 | Returns:
130 | LifeAppDataclass4Test: refer to `test.app.echo_ws_app.get_app()`.
131 | LifeAppDataclass4Test.app: The echo ws app for test
132 | def LifeAppDataclass4Test.request(): Get the latest original http request from the client
133 | """
134 | app_dataclass = get_ws_test_app()
135 | life_app = await lifespan_manager(app_dataclass.app)
136 | return LifeAppDataclass4Test(app=life_app, request_dict=app_dataclass.request_dict)
137 |
138 |
139 | def _app_fct_life_wapper( # noqa: D417
140 | app_fct: Callable[_P, ASGIApp], lifespan_manager_fixture: LifespanManagerFixture
141 | ) -> Callable[_P, Coroutine[None, None, ASGIApp]]:
142 | """A wrapper for app factory function.
143 |
144 | Make the lifespan event of the app returned by `app_fct()` be managed automatically by pytest.
145 |
146 | Args:
147 | app_fct: The app factory function which need to be wrapped.
148 |
149 | Returns:
150 | The wrapped app factory function.
151 | """
152 |
153 | async def wappered_app_fct(*args: _P.args, **kwargs: _P.kwargs) -> ASGIApp:
154 | """Return an app with lifespan event managed automatically by pytest."""
155 | app = app_fct(*args, **kwargs)
156 | return await lifespan_manager_fixture(app)
157 |
158 | return wappered_app_fct
159 |
160 |
161 | @pytest.fixture
162 | def forward_http_app_fct(
163 | lifespan_manager: LifespanManagerFixture,
164 | ): # -> AppFactoryFixture
165 | """Return wrapped `fastapi_proxy_lib.fastapi.app.forward_http_app()`.
166 |
167 | The lifespan of app returned by original `forward_http_app()` will be managed automatically by pytest.
168 | """
169 | return _app_fct_life_wapper(forward_http_app, lifespan_manager)
170 |
171 |
172 | @pytest.fixture
173 | def reverse_http_app_fct(
174 | lifespan_manager: LifespanManagerFixture,
175 | ): # -> AppFactoryFixture
176 | """Return wrapped `fastapi_proxy_lib.fastapi.app.reverse_http_app()`.
177 |
178 | The lifespan of app returned by original `reverse_http_app()` will be managed automatically by pytest.
179 | """
180 | return _app_fct_life_wapper(reverse_http_app, lifespan_manager)
181 |
182 |
183 | @pytest.fixture
184 | def reverse_ws_app_fct(
185 | lifespan_manager: LifespanManagerFixture,
186 | ): # -> AppFactoryFixture
187 | """Return wrapped `fastapi_proxy_lib.fastapi.app.reverse_ws_app()`.
188 |
189 | The lifespan of app returned by original `reverse_ws_app()` will be managed automatically by pytest.
190 | """
191 | return _app_fct_life_wapper(reverse_ws_app, lifespan_manager)
192 |
193 |
194 | @pytest.fixture
195 | async def uvicorn_server_fixture() -> AsyncIterator[UvicornServerFixture]:
196 | """Fixture for UvicornServer.
197 |
198 | Will launch and shutdown automatically.
199 | """
200 | async with AsyncExitStack() as exit_stack:
201 |
202 | async def uvicorn_server_fct(
203 | config: uvicorn.Config, contx_exit_timeout: Union[int, float, None] = None
204 | ) -> UvicornServer:
205 | uvicorn_server = await exit_stack.enter_async_context(
206 | UvicornServer(config=config, contx_exit_timeout=contx_exit_timeout)
207 | )
208 | return uvicorn_server
209 |
210 | yield uvicorn_server_fct
211 |
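Editor's note: a minimal sketch of a test consuming the fixtures above; the route and assertion are illustrative. `reverse_http_app_fct` returns an ASGI app whose lifespan pytest already manages, so it can be driven directly through `httpx.ASGITransport`.

```python
import httpx
import pytest


@pytest.mark.anyio
async def test_example(reverse_http_app_fct: AppFactoryFixture) -> None:
    app = await reverse_http_app_fct(base_url="http://www.example.com/")
    async with httpx.AsyncClient(
        transport=httpx.ASGITransport(app), base_url="http://testserver"
    ) as client:
        resp = await client.get("/")
        assert resp.status_code  # the response proxied back from the target
```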
--------------------------------------------------------------------------------
/tests/test_core_lib.py:
--------------------------------------------------------------------------------
1 | # noqa: D100
2 |
3 | import httpx
4 | import pytest
5 | from fastapi import APIRouter, FastAPI
6 | from httpx import ASGITransport
7 | from starlette.responses import JSONResponse
8 |
9 | from fastapi_proxy_lib.core._tool import (
10 | BaseURLError,
11 | ErrMsg,
12 | check_base_url,
13 | default_proxy_filter,
14 | return_err_msg_response,
15 | )
16 | from fastapi_proxy_lib.core.http import ReverseHttpProxy
17 | from fastapi_proxy_lib.fastapi.app import forward_http_app, reverse_http_app
18 | from fastapi_proxy_lib.fastapi.router import RouterHelper
19 |
20 | from .tool import DEFAULT_URL
21 |
22 |
23 | def test_base_url_cheking_when_init() -> None:
24 |     """Test that a base_url argument violating the input requirements raises an error at instantiation."""
25 |     # does not end with `"/"`
26 | with pytest.raises(BaseURLError):
27 | reverse_http_app(base_url="http://www.echo.com/path")
28 | with pytest.raises(BaseURLError):
29 | reverse_http_app(base_url="http://www.echo.com")
30 |     # no netloc
31 | with pytest.raises(BaseURLError):
32 | reverse_http_app(base_url="http://")
33 |     # no scheme
34 | with pytest.raises(BaseURLError):
35 | reverse_http_app(base_url="www.echo.com")
36 |     # has a query or fragment
37 | with pytest.warns():
38 | reverse_http_app(base_url="http://www.echo.com/?q=foo")
39 | with pytest.warns():
40 | reverse_http_app(base_url="http://www.echo.com/#foo")
41 |
42 |     # test whether normalization works correctly
43 | with pytest.warns():
44 | assert check_base_url("http://www.echo.com/?p=1#foo") == "http://www.echo.com/"
45 |
46 |
47 | @pytest.mark.anyio
48 | async def test_func_return_err_msg_response() -> None:
49 | """Test `fastapi_proxy_lib.core._tool.return_err_msg_response()`."""
50 |
51 | class FooError(Exception):
52 | pass
53 |
54 | test_error = FooError("bar")
55 | test_err_msg = ErrMsg(err_type="FooError", msg="bar")
56 |
57 | app = FastAPI()
58 |
59 | @app.get("/exception")
60 | async def _() -> JSONResponse:
61 | return return_err_msg_response(test_error, status_code=0)
62 |
63 | @app.get("/mapping")
64 | async def _() -> JSONResponse:
65 | return return_err_msg_response(test_err_msg, status_code=0)
66 |
67 |     # test whether the response matches the following spec
68 | # {
69 | # "detail": {
70 | # "err_type": "RuntimeError",
71 | # "msg": "Something wrong."
72 | # }
73 | # }
74 |
75 | client = httpx.AsyncClient(
76 | transport=ASGITransport(app), base_url="http://www.example.com"
77 | )
78 | resp = await client.get("http://www.example.com/exception")
79 | assert resp.status_code == 0
80 | assert resp.json()["detail"] == test_err_msg
81 | resp = await client.get("/mapping")
82 | assert resp.status_code == 0
83 | assert resp.json()["detail"] == test_err_msg
84 |
85 |     # passing only _msg or _exc_info without a logger issues a warning
86 | with pytest.warns():
87 | return_err_msg_response(test_error, status_code=0, _msg="foo")
88 | with pytest.warns():
89 | return_err_msg_response(test_error, status_code=0, _exc_info=test_error)
90 |
91 |
92 | def test_func_default_proxy_filter() -> None:
93 | """Test `fastapi_proxy_lib.core._tool.default_proxy_filter()`."""
94 | # prevent access to private ip
95 |
96 | def _check(url: str, should_pass: bool) -> None:
97 | httpx_url = httpx.URL(url)
98 | if should_pass:
99 | assert default_proxy_filter(httpx_url) is None
100 | else:
101 | assert default_proxy_filter(httpx_url) is not None
102 |
103 | def should_pass(url: str) -> None:
104 | _check(url, True)
105 |
106 | def should_not_pass(url: str) -> None:
107 | _check(url, False)
108 |
109 | # passed
110 | should_pass("http://www.example.com")
111 | should_pass("http://www.example.com/path")
112 | should_pass("http://1.1.1.1")
113 |
114 | # private ip
115 | should_not_pass("http://127.0.0.1")
116 | should_not_pass("http://[::1]")
117 | should_not_pass("http://192.168.0.1")
118 | should_not_pass("http://10.0.0.1")
119 | should_not_pass("http://172.31.0.1")
120 | should_not_pass("http://localhost")
121 |
122 |
123 | def test_non_filter_warning_for_forward_proxy() -> None:
124 |     """A warning is issued when no proxy filter is specified for a forward proxy."""
125 | with pytest.warns():
126 | forward_http_app()
127 |
128 |
129 | def test_duplicate_router_warning() -> None:
130 |     """Registering proxies on the same router more than once issues a warning."""
131 | helper = RouterHelper()
132 |
133 | proxy0 = ReverseHttpProxy(base_url=DEFAULT_URL)
134 | proxy1 = ReverseHttpProxy(base_url=DEFAULT_URL)
135 |
136 | router0 = APIRouter()
137 | router1 = APIRouter()
138 |
139 | helper.register_router(proxy0, router0)
140 |     # registering again with the same router issues a warning
141 | with pytest.warns():
142 | helper.register_router(proxy1, router0)
143 |
144 | anything_except_router = object()
145 | with pytest.raises(TypeError):
146 | helper.register_router(
147 | anything_except_router, # pyright: ignore[reportCallIssue, reportArgumentType]
148 | router1,
149 | )
150 |
--------------------------------------------------------------------------------
/tests/test_docs_examples.py:
--------------------------------------------------------------------------------
1 | """Test examples in code docs."""
2 |
3 |
4 | def test_forward_http_proxy() -> None:
5 |     """Test the example in ForwardHttpProxy."""
6 | from collections.abc import AsyncIterator
7 | from contextlib import asynccontextmanager
8 |
9 | from fastapi import FastAPI
10 | from httpx import AsyncClient
11 | from starlette.requests import Request
12 |
13 | from fastapi_proxy_lib.core.http import ForwardHttpProxy
14 | from fastapi_proxy_lib.core.tool import default_proxy_filter
15 |
16 | proxy = ForwardHttpProxy(AsyncClient(), proxy_filter=default_proxy_filter)
17 |
18 | @asynccontextmanager
19 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]:
20 | """Close proxy."""
21 | yield
22 | await proxy.aclose()
23 |
24 | app = FastAPI(lifespan=close_proxy_event)
25 |
26 | @app.get("/{path:path}")
27 | async def _(request: Request, path: str = ""):
28 | return await proxy.proxy(request=request, path=path)
29 |
30 |     # Then run shell: `uvicorn <module>:app --host 127.0.0.1 --port 8000`
31 | # visit the app: `http://127.0.0.1:8000/http://www.example.com`
32 | # you will get the response from `http://www.example.com`
33 |
34 |
35 | def test_reverse_http_proxy() -> None:
36 |     """Test the example in ReverseHttpProxy."""
37 | from collections.abc import AsyncIterator
38 | from contextlib import asynccontextmanager
39 |
40 | from fastapi import FastAPI
41 | from httpx import AsyncClient
42 | from starlette.requests import Request
43 |
44 | from fastapi_proxy_lib.core.http import ReverseHttpProxy
45 |
46 | proxy = ReverseHttpProxy(AsyncClient(), base_url="http://www.example.com/")
47 |
48 | @asynccontextmanager
49 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]: # (1)!
50 | """Close proxy."""
51 | yield
52 | await proxy.aclose()
53 |
54 | app = FastAPI(lifespan=close_proxy_event)
55 |
56 | @app.get("/{path:path}") # (2)!
57 | async def _(request: Request, path: str = ""):
58 | return await proxy.proxy(request=request, path=path) # (3)!
59 |
60 |     # Then run shell: `uvicorn <module>:app --host 127.0.0.1 --port 8000`
61 | # visit the app: `http://127.0.0.1:8000/`
62 | # you will get the response from `http://www.example.com/`
63 |
64 | """ 1. lifespan please refer to [starlette/lifespan](https://www.starlette.io/lifespan/)
65 | 2. `{path:path}` is the key.
66 | It allows the app to accept all path parameters.
67 | visit for more info.
68 | 3. !!! info
69 | In fact, you only need to pass the `request: Request` argument.
70 | `fastapi_proxy_lib` can automatically get the `path` from `request`.
71 | Explicitly pointing it out here is just to remind you not to forget to specify `{path:path}`. """
72 |
73 |
74 | def test_reverse_ws_proxy() -> None:
75 |     """Test the example in ReverseWebSocketProxy."""
76 | from collections.abc import AsyncIterator
77 | from contextlib import asynccontextmanager
78 |
79 | from fastapi import FastAPI
80 | from httpx import AsyncClient
81 | from starlette.websockets import WebSocket
82 |
83 | from fastapi_proxy_lib.core.websocket import ReverseWebSocketProxy
84 |
85 | proxy = ReverseWebSocketProxy(AsyncClient(), base_url="ws://echo.websocket.events/")
86 |
87 | @asynccontextmanager
88 | async def close_proxy_event(_: FastAPI) -> AsyncIterator[None]:
89 | """Close proxy."""
90 | yield
91 | await proxy.aclose()
92 |
93 | app = FastAPI(lifespan=close_proxy_event)
94 |
95 | @app.websocket("/{path:path}")
96 | async def _(websocket: WebSocket, path: str = ""):
97 | return await proxy.proxy(websocket=websocket, path=path)
98 |
99 |     # Then run shell: `uvicorn <module>:app --host 127.0.0.1 --port 8000`
100 | # visit the app: `ws://127.0.0.1:8000/`
101 | # you can establish websocket connection with `ws://echo.websocket.events`
102 |
103 |
104 | def test_router_helper() -> None:
105 |     """Test the example in RouterHelper."""
106 | from fastapi import APIRouter, FastAPI
107 |
108 | from fastapi_proxy_lib.core.http import ForwardHttpProxy, ReverseHttpProxy
109 | from fastapi_proxy_lib.core.tool import default_proxy_filter
110 | from fastapi_proxy_lib.core.websocket import ReverseWebSocketProxy
111 | from fastapi_proxy_lib.fastapi.router import RouterHelper
112 |
113 | reverse_http_proxy = ReverseHttpProxy(base_url="http://www.example.com/")
114 | reverse_ws_proxy = ReverseWebSocketProxy(base_url="ws://echo.websocket.events/")
115 | forward_http_proxy = ForwardHttpProxy(proxy_filter=default_proxy_filter)
116 |
117 | helper = RouterHelper()
118 |
119 | reverse_http_router = helper.register_router(
120 | reverse_http_proxy,
121 | APIRouter(prefix="/reverse"), # (1)!
122 | )
123 | forward_http_router = helper.register_router(
124 | forward_http_proxy,
125 | APIRouter(prefix="/forward"),
126 | )
127 | reverse_ws_router = helper.register_router(reverse_ws_proxy) # (2)!
128 |
129 | app = FastAPI(lifespan=helper.get_lifespan()) # (3)!
130 |
131 | app.include_router(reverse_http_router, prefix="/http") # (4)!
132 | app.include_router(forward_http_router, prefix="/http")
133 | app.include_router(reverse_ws_router, prefix="/ws")
134 |
135 | # reverse http proxy on "/http/reverse"
136 | # forward http proxy on "/http/forward"
137 | # reverse websocket proxy on "/ws"
138 |
139 | """ 1. You can pass any arguments to [`APIRouter()`][fastapi.APIRouter] if you want.
140 | 2. Or, with default values, `RouterHelper` will create a new router for you.
141 | 3. Registering a lifespan event to close all proxies is a recommended action.
142 | 4. You can use the proxy router just like a normal `APIRouter`. """
143 |
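144 | # A minimal client-side sketch (an assumption, not part of the documented examples above):
145 | # if one of the apps built in these examples is served at `http://127.0.0.1:8000`, a client
146 | # could exercise it roughly like this with httpx (the URLs mirror the comments above):
147 | #
148 | #   import httpx
149 | #
150 | #   async def demo() -> None:
151 | #       async with httpx.AsyncClient() as client:
152 | #           # reverse proxy: any path is forwarded to the configured `base_url`
153 | #           await client.get("http://127.0.0.1:8000/")
154 | #           # forward proxy: append the full target URL to the proxy's own URL
155 | #           await client.get("http://127.0.0.1:8000/http://www.example.com")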
--------------------------------------------------------------------------------
/tests/test_http.py:
--------------------------------------------------------------------------------
1 | # noqa: D100
2 |
3 |
4 | import logging
5 |
6 | import httpx
7 | import pytest
8 | from httpx import ASGITransport
9 | from typing_extensions import override
10 |
11 | from fastapi_proxy_lib.core.tool import default_proxy_filter
12 |
13 | from .conftest import AppFactoryFixture, LifeAppDataclass4Test
14 | from .tool import (
15 | DEFAULT_URL,
16 | PRIVATE_IP_URL,
17 | WRONG_PROTO_URL,
18 | AbstractTestProxy,
19 | Tool4TestFixture,
20 | check_if_err_resp_is_from_px_serv,
21 | )
22 |
23 | DEFAULT_TARGET_SERVER_BASE_URL = "http://www.echo.com/"
24 | DEFAULT_PROXY_SERVER_BASE_URL = "http://www.proxy.com/"
25 |
26 |
27 | class TestReverseHttpProxy(AbstractTestProxy):
28 | """For testing reverse http proxy."""
29 |
30 | @override
31 | @pytest.fixture
32 | async def tool_4_test_fixture( # pyright: ignore[reportIncompatibleMethodOverride]
33 | self,
34 | echo_http_test_model: LifeAppDataclass4Test,
35 | reverse_http_app_fct: AppFactoryFixture,
36 | ) -> Tool4TestFixture:
37 | """For the target server, see `tests.app.echo_http_app.get_app`."""
38 | client_for_conn_to_target_server = httpx.AsyncClient(
39 | transport=ASGITransport(echo_http_test_model.app),
40 | base_url=DEFAULT_TARGET_SERVER_BASE_URL,
41 | )
42 |
43 | reverse_http_app = await reverse_http_app_fct(
44 | client=client_for_conn_to_target_server,
45 | base_url=DEFAULT_TARGET_SERVER_BASE_URL,
46 | )
47 |
48 | client_for_conn_to_proxy_server = httpx.AsyncClient(
49 | transport=ASGITransport(reverse_http_app),
50 | base_url=DEFAULT_PROXY_SERVER_BASE_URL,
51 | )
52 |
53 | get_request = echo_http_test_model.get_request
54 |
55 | return Tool4TestFixture(
56 | client_for_conn_to_target_server=client_for_conn_to_target_server,
57 | client_for_conn_to_proxy_server=client_for_conn_to_proxy_server,
58 | get_request=get_request,
59 | target_server_base_url=DEFAULT_TARGET_SERVER_BASE_URL,
60 | proxy_server_base_url=DEFAULT_PROXY_SERVER_BASE_URL,
61 | )
62 |
63 | @pytest.mark.anyio
64 | async def test_all_request_methods(
65 | self, tool_4_test_fixture: Tool4TestFixture
66 | ) -> None:
67 | """Test that all request methods work properly."""
68 | client_for_conn_to_proxy_server = (
69 | tool_4_test_fixture.client_for_conn_to_proxy_server
70 | )
71 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
72 |
73 | resp_lst = (
74 | await client_for_conn_to_proxy_server.get(proxy_server_base_url),
75 | await client_for_conn_to_proxy_server.post(proxy_server_base_url),
76 | await client_for_conn_to_proxy_server.put(proxy_server_base_url),
77 | await client_for_conn_to_proxy_server.head(proxy_server_base_url),
78 | await client_for_conn_to_proxy_server.options(proxy_server_base_url),
79 | await client_for_conn_to_proxy_server.delete(proxy_server_base_url),
80 | await client_for_conn_to_proxy_server.patch(proxy_server_base_url),
81 | )
82 | assert all(resp.is_success for resp in resp_lst)
83 |
84 | @pytest.mark.anyio
85 | async def test_if_the_header_is_properly_handled(
86 | self, tool_4_test_fixture: Tool4TestFixture
87 | ) -> None:
88 | """Test that request headers are handled correctly."""
89 | client_for_conn_to_proxy_server = (
90 | tool_4_test_fixture.client_for_conn_to_proxy_server
91 | )
92 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
93 | target_server_base_url = tool_4_test_fixture.target_server_base_url
94 |
95 | ########## Test the keep_alive checkpoint ##########
96 |
97 | # Client close-connection request, plus routine checks:
98 | # 1. request headers are forwarded to the target server losslessly
99 | # 2. the host request header is handled correctly
100 |
101 | proxy_resp = await client_for_conn_to_proxy_server.head(
102 | proxy_server_base_url + "head/return_keep_alive_headers",
103 | headers={
104 | "foo": "bar",
105 | "Connection": "close",
106 | },
107 | )
108 |
109 | target_server_recv_request = tool_4_test_fixture.get_request()
110 |
111 | # Test that the client's request to close the connection is respected
112 | assert "close" in proxy_resp.headers["connection"]
113 |
114 | # Test that request headers are forwarded to the target server losslessly
115 | assert target_server_recv_request.headers["foo"] == "bar"
116 |
117 | # Test that the proxy server forces a "connection: keep-alive" request header to the target server
118 | assert "keep-alive" in target_server_recv_request.headers["connection"]
119 |
120 | # Test that the host request header is handled correctly
121 | assert target_server_recv_request.headers["host"] == httpx.URL(
122 | target_server_base_url
123 | ).netloc.decode("ascii")
124 |
125 | # Client keep-alive request
126 |
127 | proxy_resp = await client_for_conn_to_proxy_server.head(
128 | proxy_server_base_url + "head/return_keep_alive_headers",
129 | headers={
130 | "Connection": "keep-alive",
131 | "Keep-Alive": "timeout=5, max=1000",
132 | },
133 | )
134 | target_server_recv_request = tool_4_test_fixture.get_request()
135 | # Test that the keep-alive request header is filtered out
136 | assert "keep-alive" not in target_server_recv_request.headers
137 |
138 | ########## Test the close_connection checkpoint ##########
139 |
140 | # Test that the client's request to keep the connection alive is respected
141 | proxy_resp = await client_for_conn_to_proxy_server.head(
142 | proxy_server_base_url + "head/return_close_connection_headers",
143 | headers={
144 | "Connection": "keep-alive",
145 | "Keep-Alive": "timeout=5, max=1000",
146 | },
147 | )
148 | assert (
149 | "connection" not in proxy_resp.headers
150 | or "close" not in proxy_resp.headers["connection"]
151 | )
152 |
153 | # Test that the client's request to close the connection is respected
154 | proxy_resp = await client_for_conn_to_proxy_server.head(
155 | proxy_server_base_url + "head/return_close_connection_headers",
156 | headers={
157 | "Connection": "close",
158 | },
159 | )
160 | assert "close" in proxy_resp.headers["connection"]
161 |
162 | @pytest.mark.anyio
163 | async def test_if_the_multiple_query_params_forwarding_is_correct(
164 | self, tool_4_test_fixture: Tool4TestFixture
165 | ) -> None:
166 | """See: ."""
167 | client_for_conn_to_proxy_server = (
168 | tool_4_test_fixture.client_for_conn_to_proxy_server
169 | )
170 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
171 |
172 | query_params = httpx.QueryParams(
173 | [
174 | ("key1", "value1"),
175 | # NOTE: following two keys are same
176 | ("key2", "value2"),
177 | ("key2", "value3"),
178 | ]
179 | )
180 | # We only need to send the query_params to any endpoint
181 | await client_for_conn_to_proxy_server.get(
182 | proxy_server_base_url + "get/echo_headers_and_params",
183 | params=query_params,
184 | )
185 |
186 | target_server_recv_request = tool_4_test_fixture.get_request()
187 |
188 | # Check if the multiple query params are forwarded correctly
189 | assert sorted(target_server_recv_request.query_params.multi_items()) == sorted(
190 | query_params.multi_items()
191 | )
192 |
193 | @pytest.mark.anyio
194 | async def test_if_the_proxy_forwarding_is_correct(
195 | self, tool_4_test_fixture: Tool4TestFixture
196 | ) -> None:
197 | """Test that the proxy server forwards requests correctly."""
198 | client_for_conn_to_proxy_server = (
199 | tool_4_test_fixture.client_for_conn_to_proxy_server
200 | )
201 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
202 |
203 | # Test that the target server's response body is forwarded correctly
204 | r = await client_for_conn_to_proxy_server.get(
205 | proxy_server_base_url + "get/echo_headers_and_params",
206 | headers={"foo": "bar"},
207 | )
208 | assert r.json()["foo"] == "bar"
209 |
210 | # Test that the client's request body is forwarded correctly
211 | r = await client_for_conn_to_proxy_server.post(
212 | proxy_server_base_url + "post/echo_body",
213 | json={"foo": "bar"},
214 | )
215 | assert r.json()["foo"] == "bar"
216 |
217 | # Test that files from the target server are forwarded correctly
218 | file_str = "你好"
219 | r = await client_for_conn_to_proxy_server.put(
220 | proxy_server_base_url + f"put/echo_file?content={file_str}",
221 | )
222 | assert r.content.decode("utf-8") == file_str
223 |
224 | @pytest.mark.anyio
225 | async def test_bad_url_request(
226 | self,
227 | reverse_http_app_fct: AppFactoryFixture,
228 | ) -> None:
229 | """Test error reporting for requests to a bad URL."""
230 | client_for_conn_to_target_server = httpx.AsyncClient()
231 |
232 | reverse_http_app = await reverse_http_app_fct(
233 | client=client_for_conn_to_target_server,
234 | base_url=WRONG_PROTO_URL,
235 | )
236 |
237 | client_for_conn_to_proxy_server = httpx.AsyncClient(
238 | transport=ASGITransport(reverse_http_app),
239 | base_url=DEFAULT_PROXY_SERVER_BASE_URL,
240 | )
241 |
242 | r = await client_for_conn_to_proxy_server.get(DEFAULT_PROXY_SERVER_BASE_URL)
243 | assert r.status_code == 502
244 | check_if_err_resp_is_from_px_serv(r)
245 |
246 | @pytest.mark.anyio
247 | async def test_cookie_leakage(
248 | self,
249 | tool_4_test_fixture: Tool4TestFixture,
250 | ) -> None:
251 | """Testing for fixing cookie leakage vulnerabilities."""
252 | client_for_conn_to_proxy_server = (
253 | tool_4_test_fixture.client_for_conn_to_proxy_server
254 | )
255 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
256 |
257 | # request to set cookie: foo=bar
258 | await client_for_conn_to_proxy_server.get(
259 | proxy_server_base_url + "get/cookies/set/foo/bar"
260 | )
261 | # check if cookie is set
262 | assert client_for_conn_to_proxy_server.cookies["foo"] == "bar"
263 | r = await client_for_conn_to_proxy_server.get(
264 | proxy_server_base_url + "get/cookies"
265 | )
266 | assert r.json()["foo"] == "bar"
267 |
268 | # Then simulate another user's client by clearing the cookie jar
269 | client_for_conn_to_proxy_server.cookies.clear()
270 | # check if cookiejar is cleared
271 | assert not client_for_conn_to_proxy_server.cookies
272 |
273 | # check if cookie is not leaked
274 | r = await client_for_conn_to_proxy_server.get(
275 | proxy_server_base_url + "get/cookies",
276 | cookies={"a": "b"},
277 | )
278 | assert "foo" not in r.json() # not leaked
279 | assert r.json()["a"] == "b" # send cookies normally
280 |
281 | @pytest.mark.anyio
282 | async def test_no_logging_basic_config_call(
283 | self, tool_4_test_fixture: Tool4TestFixture, monkeypatch: pytest.MonkeyPatch
284 | ) -> None:
285 | """Test that we don't accidentally call `logging.basicConfig()`.
286 |
287 | See issue #45
288 | """
289 | root = logging.getLogger()
290 | monkeypatch.setattr(root, "handlers", [])
291 |
292 | client_for_conn_to_proxy_server = (
293 | tool_4_test_fixture.client_for_conn_to_proxy_server
294 | )
295 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
296 |
297 | resp = await client_for_conn_to_proxy_server.get(proxy_server_base_url)
298 | assert resp.is_success
299 |
300 | assert not root.handlers, "logging handler added"
301 |
302 |
303 | class TestForwardHttpProxy(AbstractTestProxy):
304 | """For testing forward http proxy."""
305 |
306 | @pytest.fixture
307 | async def tool_4_test_fixture( # pyright: ignore[reportIncompatibleMethodOverride]
308 | self,
309 | echo_http_test_model: LifeAppDataclass4Test,
310 | forward_http_app_fct: AppFactoryFixture,
311 | ) -> Tool4TestFixture:
312 | """For the target server, see `tests.app.echo_http_app.get_app`."""
313 | client_for_conn_to_target_server = httpx.AsyncClient(
314 | transport=ASGITransport(echo_http_test_model.app),
315 | base_url=DEFAULT_TARGET_SERVER_BASE_URL,
316 | )
317 |
318 | forward_http_app = await forward_http_app_fct(
319 | client=client_for_conn_to_target_server, proxy_filter=default_proxy_filter
320 | )
321 |
322 | client_for_conn_to_proxy_server = httpx.AsyncClient(
323 | transport=ASGITransport(forward_http_app),
324 | base_url=DEFAULT_PROXY_SERVER_BASE_URL,
325 | )
326 |
327 | get_request = echo_http_test_model.get_request
328 |
329 | return Tool4TestFixture(
330 | client_for_conn_to_target_server=client_for_conn_to_target_server,
331 | client_for_conn_to_proxy_server=client_for_conn_to_proxy_server,
332 | get_request=get_request,
333 | target_server_base_url=DEFAULT_TARGET_SERVER_BASE_URL,
334 | proxy_server_base_url=DEFAULT_PROXY_SERVER_BASE_URL,
335 | )
336 |
337 | @pytest.mark.anyio
338 | async def test_all_request_methods(
339 | self, tool_4_test_fixture: Tool4TestFixture
340 | ) -> None:
341 | """Test that all request methods work properly."""
342 | client_for_conn_to_proxy_server = (
343 | tool_4_test_fixture.client_for_conn_to_proxy_server
344 | )
345 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
346 | target_server_base_url = tool_4_test_fixture.target_server_base_url
347 |
348 | test_url = proxy_server_base_url + target_server_base_url
349 |
350 | resp_lst = (
351 | await client_for_conn_to_proxy_server.get(test_url),
352 | await client_for_conn_to_proxy_server.post(test_url),
353 | await client_for_conn_to_proxy_server.put(test_url),
354 | await client_for_conn_to_proxy_server.head(test_url),
355 | await client_for_conn_to_proxy_server.options(test_url),
356 | await client_for_conn_to_proxy_server.delete(test_url),
357 | await client_for_conn_to_proxy_server.patch(test_url),
358 | )
359 | assert all(resp.is_success for resp in resp_lst)
360 |
361 | @pytest.mark.anyio
362 | async def test_bad_url_request(
363 | self,
364 | forward_http_app_fct: AppFactoryFixture,
365 | ) -> None:
366 | """Test error reporting for requests to a bad URL."""
367 | client_for_conn_to_target_server = httpx.AsyncClient()
368 |
369 | forward_http_app = await forward_http_app_fct(
370 | client=client_for_conn_to_target_server, proxy_filter=default_proxy_filter
371 | )
372 |
373 | client_for_conn_to_proxy_server = httpx.AsyncClient(
374 | transport=ASGITransport(forward_http_app),
375 | base_url=DEFAULT_PROXY_SERVER_BASE_URL,
376 | )
377 |
378 | # A malformed URL from which no request can be made
379 | r = await client_for_conn_to_proxy_server.get(
380 | DEFAULT_PROXY_SERVER_BASE_URL + WRONG_PROTO_URL
381 | )
382 | assert r.status_code == 400
383 | check_if_err_resp_is_from_px_serv(r)
384 |
385 | # An empty URL
386 | r = await client_for_conn_to_proxy_server.get(DEFAULT_PROXY_SERVER_BASE_URL)
387 | assert r.status_code == 400
388 | check_if_err_resp_is_from_px_serv(r)
389 |
390 | # A URL that tries to access a private IP
391 | r = await client_for_conn_to_proxy_server.get(
392 | DEFAULT_PROXY_SERVER_BASE_URL + PRIVATE_IP_URL
393 | )
394 | assert r.status_code == 403
395 | check_if_err_resp_is_from_px_serv(r)
396 |
397 | @pytest.mark.anyio
398 | async def test_500_proxy_server_internal_error(
399 | self,
400 | forward_http_app_fct: AppFactoryFixture,
401 | ) -> None:
402 | """Test error reporting when the proxy server has network problems."""
403 |
404 | async def connect_error_mock_handler(
405 | request: httpx.Request,
406 | ) -> httpx.Response:
407 | """Simulate a connection error."""
408 | raise httpx.ConnectError(
409 | "MockTransport Raise httpx.ConnectError", request=request
410 | )
411 |
412 | client_for_conn_to_target_server = httpx.AsyncClient(
413 | transport=httpx.MockTransport(handler=connect_error_mock_handler)
414 | )
415 |
416 | forward_http_app = await forward_http_app_fct(
417 | client=client_for_conn_to_target_server, proxy_filter=default_proxy_filter
418 | )
419 |
420 | client_for_conn_to_proxy_server = httpx.AsyncClient(
421 | transport=ASGITransport(forward_http_app),
422 | base_url=DEFAULT_PROXY_SERVER_BASE_URL,
423 | )
424 |
425 | r = await client_for_conn_to_proxy_server.get(
426 | DEFAULT_PROXY_SERVER_BASE_URL + DEFAULT_URL
427 | )
428 | assert r.status_code == 500
429 | check_if_err_resp_is_from_px_serv(r)
430 |
431 | @pytest.mark.anyio
432 | async def test_denial_http2(
433 | self,
434 | forward_http_app_fct: AppFactoryFixture,
435 | ) -> None:
436 | """Test rejection of HTTP/2 requests."""
437 | # HACK: give up on this test for now
438 | # httpx.ASGITransport is hard-coded to send http/1.1 requests,
439 | # so this cannot be tested properly
440 | return
441 |
442 | proxy_server_base_url = self.proxy_server_base_url
443 |
444 | client_for_conn_to_target_server = httpx.AsyncClient()
445 |
446 | forward_http_app = await forward_http_app_fct(
447 | client=client_for_conn_to_target_server, proxy_filter=default_proxy_filter
448 | )
449 |
450 | client_for_conn_to_proxy_server = httpx.AsyncClient(
451 | app=forward_http_app,
452 | base_url=proxy_server_base_url,
453 | http2=True,
454 | http1=False,
455 | )
456 |
457 | # A wrong URL
458 | r = await client_for_conn_to_proxy_server.get(
459 | proxy_server_base_url + DEFAULT_URL
460 | )
461 |
462 | assert r.status_code == 505
463 | check_if_err_resp_is_from_px_serv(r)
464 |
--------------------------------------------------------------------------------
/tests/test_ws.py:
--------------------------------------------------------------------------------
1 | # noqa: D100
2 |
3 |
4 | import asyncio
5 | from contextlib import AsyncExitStack
6 | from multiprocessing import Process, Queue
7 | from typing import Any, Literal, Optional
8 |
9 | import httpx
10 | import httpx_ws
11 | import pytest
12 | import uvicorn
13 | from httpx_ws import AsyncWebSocketSession, aconnect_ws
14 | from starlette import websockets as starlette_websockets_module
15 | from typing_extensions import override
16 |
17 | from fastapi_proxy_lib.fastapi.app import reverse_ws_app as get_reverse_ws_app
18 |
19 | from .app.echo_ws_app import get_app as get_ws_test_app
20 | from .app.tool import UvicornServer
21 | from .conftest import UvicornServerFixture
22 | from .tool import (
23 | AbstractTestProxy,
24 | Tool4TestFixture,
25 | )
26 |
27 | DEFAULT_HOST = "127.0.0.1"
28 | DEFAULT_PORT = 0
29 | DEFAULT_CONTX_EXIT_TIMEOUT = 5
30 |
31 | # WS_BACKENDS_NEED_BE_TESTED = ("websockets", "wsproto")
32 | # # FIXME: wsproto has issues; skip testing it for now
33 | # # ConnectionResetError: [WinError 10054] An existing connection was forcibly closed by the remote host.
34 | # # https://github.com/encode/uvicorn/discussions/2105
35 | WS_BACKENDS_NEED_BE_TESTED = ("websockets",)
36 |
37 | # https://www.python-httpx.org/advanced/transports/#no-proxy-support
38 | NO_PROXIES: dict[Any, Any] = {"all://": None}
39 |
40 |
41 | def _subprocess_run_echo_ws_uvicorn_server(queue: "Queue[str]", **kwargs: Any):
42 | """Run echo ws app in subprocess.
43 |
44 | Args:
45 | queue: The queue into which the subprocess puts the url of the echo ws app.
46 | After the server is started, the url will be put into the queue.
47 | **kwargs: The kwargs for `uvicorn.Config`
48 | """
49 | default_kwargs = {
50 | "app": get_ws_test_app().app,
51 | "port": DEFAULT_PORT,
52 | "host": DEFAULT_HOST,
53 | }
54 | default_kwargs.update(kwargs)
55 | target_ws_server = UvicornServer(
56 | uvicorn.Config(**default_kwargs), # pyright: ignore[reportArgumentType]
57 | )
58 |
59 | async def run():
60 | await target_ws_server.aenter()
61 | url = str(target_ws_server.contx_socket_url)
62 | queue.put(url)
63 | queue.close()
64 |
65 | # run forever
66 | while True: # noqa: ASYNC110 # false-positive
67 | await asyncio.sleep(0.1)
68 |
69 | asyncio.run(run())
70 |
71 |
72 | def _subprocess_run_httpx_ws(
73 | queue: "Queue[str]",
74 | kwargs_async_client: Optional[dict[str, Any]] = None,
75 | kwargs_aconnect_ws: Optional[dict[str, Any]] = None,
76 | ):
77 | """Run aconnect_ws in subprocess.
78 |
79 | Args:
80 | queue: The queue into which the subprocess puts a flag once the ws connection is established.
81 | kwargs_async_client: The kwargs for `httpx.AsyncClient`
82 | kwargs_aconnect_ws: The kwargs for `httpx_ws.aconnect_ws`
83 | """
84 | kwargs_async_client = kwargs_async_client or {}
85 | kwargs_aconnect_ws = kwargs_aconnect_ws or {}
86 |
87 | kwargs_async_client.pop("mounts", None)
88 | kwargs_aconnect_ws.pop("client", None)
89 |
90 | async def run():
91 | _exit_stack = AsyncExitStack()
92 | _temp_client = httpx.AsyncClient(mounts=NO_PROXIES, **kwargs_async_client)
93 | # it's an `httpx-ws` typing issue, so ignore it
94 | _ws_session: AsyncWebSocketSession = (
95 | aconnect_ws( # pyright: ignore[reportAssignmentType]
96 | client=_temp_client,
97 | **kwargs_aconnect_ws,
98 | )
99 | )
100 | _ = await _exit_stack.enter_async_context(_ws_session)
101 | queue.put("done")
102 | queue.close()
103 |
104 | # run forever
105 | while True: # noqa: ASYNC110 # false-positive
106 | await asyncio.sleep(0.1)
107 |
108 | asyncio.run(run())
109 |
110 |
111 | class TestReverseWsProxy(AbstractTestProxy):
112 | """For testing reverse websocket proxy."""
113 |
114 | @override
115 | @pytest.fixture(params=WS_BACKENDS_NEED_BE_TESTED)
116 | async def tool_4_test_fixture( # pyright: ignore[reportIncompatibleMethodOverride]
117 | self,
118 | uvicorn_server_fixture: UvicornServerFixture,
119 | request: pytest.FixtureRequest,
120 | ) -> Tool4TestFixture:
121 | """For the target server, see `tests.app.echo_ws_app.get_app`."""
122 | echo_ws_test_model = get_ws_test_app()
123 | echo_ws_app = echo_ws_test_model.app
124 | echo_ws_get_request = echo_ws_test_model.get_request
125 |
126 | target_ws_server = await uvicorn_server_fixture(
127 | uvicorn.Config(
128 | echo_ws_app, port=DEFAULT_PORT, host=DEFAULT_HOST, ws=request.param
129 | ),
130 | contx_exit_timeout=DEFAULT_CONTX_EXIT_TIMEOUT,
131 | )
132 |
133 | target_server_base_url = str(target_ws_server.contx_socket_url)
134 |
135 | client_for_conn_to_target_server = httpx.AsyncClient(mounts=NO_PROXIES)
136 |
137 | reverse_ws_app = get_reverse_ws_app(
138 | client=client_for_conn_to_target_server, base_url=target_server_base_url
139 | )
140 |
141 | proxy_ws_server = await uvicorn_server_fixture(
142 | uvicorn.Config(
143 | reverse_ws_app, port=DEFAULT_PORT, host=DEFAULT_HOST, ws=request.param
144 | ),
145 | contx_exit_timeout=DEFAULT_CONTX_EXIT_TIMEOUT,
146 | )
147 |
148 | proxy_server_base_url = str(proxy_ws_server.contx_socket_url)
149 |
150 | client_for_conn_to_proxy_server = httpx.AsyncClient(mounts=NO_PROXIES)
151 |
152 | return Tool4TestFixture(
153 | client_for_conn_to_target_server=client_for_conn_to_target_server,
154 | client_for_conn_to_proxy_server=client_for_conn_to_proxy_server,
155 | get_request=echo_ws_get_request,
156 | target_server_base_url=target_server_base_url,
157 | proxy_server_base_url=proxy_server_base_url,
158 | )
159 |
160 | @pytest.mark.anyio
161 | async def test_ws_proxy(self, tool_4_test_fixture: Tool4TestFixture) -> None:
162 | """Test the websocket proxy."""
163 | proxy_server_base_url = tool_4_test_fixture.proxy_server_base_url
164 | client_for_conn_to_proxy_server = (
165 | tool_4_test_fixture.client_for_conn_to_proxy_server
166 | )
167 | get_request = tool_4_test_fixture.get_request
168 |
169 | ########## Test normal data forwarding ##########
170 |
171 | async with aconnect_ws(
172 | proxy_server_base_url + "echo_text", client_for_conn_to_proxy_server
173 | ) as ws:
174 | await ws.send_text("foo")
175 | assert await ws.receive_text() == "foo"
176 |
177 | async with aconnect_ws(
178 | proxy_server_base_url + "echo_bytes", client_for_conn_to_proxy_server
179 | ) as ws:
180 | await ws.send_bytes(b"foo")
181 | assert await ws.receive_bytes() == b"foo"
182 |
183 | ########## Test multiple query params ##########
184 | # see:
185 |
186 | query_params = httpx.QueryParams(
187 | [
188 | ("key1", "value1"),
189 | # NOTE: following two keys are same
190 | ("key2", "value2"),
191 | ("key2", "value3"),
192 | ]
193 | )
194 | # We only need to send the query_params to any endpoint
195 | async with aconnect_ws(
196 | proxy_server_base_url + "just_close_with_1001",
197 | client_for_conn_to_proxy_server,
198 | params=query_params,
199 | ):
200 | pass
201 |
202 | target_starlette_ws = get_request()
203 | # Check if the multiple query params are forwarded correctly
204 | assert sorted(target_starlette_ws.query_params.multi_items()) == sorted(
205 | query_params.multi_items()
206 | )
207 |
208 | ########## Test subprotocols ##########
209 |
210 | async with aconnect_ws(
211 | proxy_server_base_url + "accept_foo_subprotocol",
212 | client_for_conn_to_proxy_server,
213 | subprotocols=["foo", "bar"],
214 | ) as ws:
215 | assert ws.subprotocol == "foo"
216 |
217 | ########## Close code ##########
218 |
219 | async with aconnect_ws(
220 | proxy_server_base_url + "just_close_with_1001",
221 | client_for_conn_to_proxy_server,
222 | ) as ws:
223 | with pytest.raises(httpx_ws.WebSocketDisconnect) as exce:
224 | await ws.receive_text()
225 | assert exce.value.code == 1001
226 |
227 | ########## Protocol upgrade failure or connection failure ##########
228 |
229 | with pytest.raises(httpx_ws.WebSocketUpgradeError) as exce:
230 | async with aconnect_ws(
231 | proxy_server_base_url + "reject_handshake",
232 | client_for_conn_to_proxy_server,
233 | ) as ws:
234 | pass
235 | # If the uvicorn server calls `websocket.close()` before `websocket.accept()`, a 403 occurs
236 | assert exce.value.response.status_code == 403
237 |
238 | ########## When the client closes abruptly, the server should receive 1011 ##########
239 |
240 | # NOTE: this test is not placed in `test_target_server_shutdown_abnormally`
241 | # because a ready-made target server is already available here, which saves server startup time
242 |
243 | aconnect_ws_subprocess_queue: Queue[str] = Queue()
244 |
245 | kwargs_async_client = {"mounts": NO_PROXIES}
246 | kwargs_aconnect_ws = {"url": proxy_server_base_url + "do_nothing"}
247 | kwargs = {
248 | "kwargs_async_client": kwargs_async_client,
249 | "kwargs_aconnect_ws": kwargs_aconnect_ws,
250 | }
251 |
252 | aconnect_ws_subprocess = Process(
253 | target=_subprocess_run_httpx_ws,
254 | args=(aconnect_ws_subprocess_queue,),
255 | kwargs=kwargs,
256 | )
257 | aconnect_ws_subprocess.start()
258 |
259 | # Avoid async blocking caused by getting from the queue
260 | while ( # noqa: ASYNC110 # `multiprocessing.Queue` is not awaitable
261 | aconnect_ws_subprocess_queue.empty()
262 | ):
263 | await asyncio.sleep(0.1)
264 | _ = aconnect_ws_subprocess_queue.get() # receiving an item means the connection was established successfully
265 |
266 | # force shutdown client
267 | aconnect_ws_subprocess.terminate()
268 | aconnect_ws_subprocess.kill()
269 | # https://pytest-cov.readthedocs.io/en/latest/subprocess-support.html#if-you-use-multiprocessing-process
270 | aconnect_ws_subprocess.join() # don't forget this; pytest-cov requires it
271 |
272 | target_starlette_ws = get_request()
273 | assert isinstance(target_starlette_ws, starlette_websockets_module.WebSocket)
274 | with pytest.raises(starlette_websockets_module.WebSocketDisconnect) as exce:
275 | await target_starlette_ws.receive_text() # receive_bytes() also works
276 |
277 | # assert exce.value.code == 1011
278 | # HACK, FIXME: the close code cannot be tested; it does not seem to be propagated correctly and differs between backends
279 | # FAILED test_ws_proxy[websockets] - assert 1005 == 1011
280 | # FAILED test_ws_proxy[wsproto] - assert == 1011
281 |
282 | # FIXME: investigate why receiving the close code takes 40s
283 | @pytest.mark.timeout(60)
284 | @pytest.mark.anyio
285 | @pytest.mark.parametrize("ws_backend", WS_BACKENDS_NEED_BE_TESTED)
286 | async def test_target_server_shutdown_abnormally(
287 | self, ws_backend: Literal["websockets", "wsproto"]
288 | ) -> None:
289 | """Test abnormal closing of the ws bridge caused by the target server disconnecting abruptly.
290 |
291 | The proxy must send the 1011 close code to the client within 60s.
292 | """
293 | subprocess_queue: Queue[str] = Queue()
294 |
295 | target_ws_server_subprocess = Process(
296 | target=_subprocess_run_echo_ws_uvicorn_server,
297 | args=(subprocess_queue,),
298 | kwargs={"port": DEFAULT_PORT, "host": DEFAULT_HOST, "ws": ws_backend},
299 | )
300 | target_ws_server_subprocess.start()
301 |
302 | # Avoid async blocking caused by getting from the queue
303 | while ( # noqa: ASYNC110 # `multiprocessing.Queue` is not awaitable
304 | subprocess_queue.empty()
305 | ):
306 | await asyncio.sleep(0.1)
307 | target_server_base_url = subprocess_queue.get()
308 |
309 | client_for_conn_to_target_server = httpx.AsyncClient(mounts=NO_PROXIES)
310 |
311 | reverse_ws_app = get_reverse_ws_app(
312 | client=client_for_conn_to_target_server, base_url=target_server_base_url
313 | )
314 |
315 | async with UvicornServer(
316 | uvicorn.Config(
317 | reverse_ws_app, port=DEFAULT_PORT, host=DEFAULT_HOST, ws=ws_backend
318 | )
319 | ) as proxy_ws_server:
320 | proxy_server_base_url = str(proxy_ws_server.contx_socket_url)
321 |
322 | async with aconnect_ws(
323 | proxy_server_base_url + "do_nothing",
324 | httpx.AsyncClient(mounts=NO_PROXIES),
325 | ) as ws:
326 | loop = asyncio.get_running_loop()
327 |
328 | # force shutdown target server
329 | target_ws_server_subprocess.terminate()
330 | target_ws_server_subprocess.kill()
331 |
332 | start = loop.time()
333 |
334 | with pytest.raises(httpx_ws.WebSocketDisconnect) as exce:
335 | await ws.receive()
336 | assert exce.value.code == 1011
337 |
338 | end = loop.time()
339 | # we require the proxy server to send 1011 in 60s
340 | assert end - start < 60
341 |
342 | # https://pytest-cov.readthedocs.io/en/latest/subprocess-support.html#if-you-use-multiprocessing-process
343 | target_ws_server_subprocess.join() # don't forget this; pytest-cov requires it
344 |
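345 | # A minimal client-side sketch (an assumption, not part of these tests): if the reverse ws
346 | # proxy app above were served at 127.0.0.1:8000 and the target exposed the same "echo_text"
347 | # endpoint used here, a client could talk through the proxy roughly like this:
348 | #
349 | #   import httpx
350 | #   from httpx_ws import aconnect_ws
351 | #
352 | #   async def demo() -> None:
353 | #       async with httpx.AsyncClient() as client:
354 | #           async with aconnect_ws("ws://127.0.0.1:8000/echo_text", client) as ws:
355 | #               await ws.send_text("foo")
356 | #               assert await ws.receive_text() == "foo"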
--------------------------------------------------------------------------------
/tests/tool.py:
--------------------------------------------------------------------------------
1 | # noqa: D100
2 |
3 | import abc
4 | from dataclasses import dataclass
5 | from typing import Callable
6 |
7 | import httpx
8 | import pytest
9 |
10 | from .app.tool import ServerRecvRequestsTypes
11 |
12 | WRONG_PROTO_URL = "wrong://wrong.fastapi_proxy_test.wrong/"
13 | WRONG_ASCII_URL = "http://www.\n.com/"
14 | DEFAULT_WRONG_URL = "http://wrong.fastapi_proxy_test.wrong/"
15 | PRIVATE_IP_URL = "http://127.0.0.1/"
16 | DEFAULT_URL = "http://www.example.com/"
17 |
18 |
19 | @dataclass
20 | class Tool4TestFixture:
21 | """Tool for test server.
22 |
23 | Attributes:
24 | client_for_conn_to_target_server: The client for connecting to target server.
25 | client_for_conn_to_proxy_server: The client for connecting to proxy server.
26 | get_request: Get the latest original http/websocket request from the client.
27 | target_server_base_url: The base url of target server.
28 | proxy_server_base_url: The base url of proxy server.
29 | """
30 |
31 | client_for_conn_to_target_server: httpx.AsyncClient
32 | client_for_conn_to_proxy_server: httpx.AsyncClient
33 | get_request: Callable[[], ServerRecvRequestsTypes]
34 | target_server_base_url: str
35 | proxy_server_base_url: str
36 |
37 |
38 | class AbstractTestProxy(abc.ABC):
39 | """Abstract class for testing proxy."""
40 |
41 | @abc.abstractmethod
42 | def tool_4_test_fixture(self) -> Tool4TestFixture:
43 | """Get the tool for test server."""
44 |
45 |
46 | def check_if_err_resp_is_from_px_serv(resp: httpx.Response) -> None:
47 | """Check whether an error response was actively sent by the proxy server.
48 |
49 | If not, an AssertionError will be raised.
50 | """
51 | assert resp.is_error, f"Not an error response: {resp}"
52 | try:
53 | resp_body = resp.json()
54 | except Exception:
55 | pytest.fail(f"Not from proxy server: {resp}")
56 | # These two fields are part of the error-message API actively returned by the proxy server
57 | assert "err_type" in resp_body["detail"]
58 | assert "msg" in resp_body["detail"]
59 |
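60 | # For reference, an error response actively sent by the proxy server is assumed to have
61 | # roughly this shape, as implied by the assertions above (field values are illustrative only):
62 | #   {"detail": {"err_type": "ConnectError", "msg": "..."}}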
--------------------------------------------------------------------------------