├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .github ├── CODEOWNERS ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── conventional-commits-lint.js │ ├── conventional-commits.yml │ └── stale.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── .release-please-manifest.json ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── Makefile ├── README.md ├── docs ├── Makefile ├── api │ ├── client.rst │ ├── exceptions.rst │ ├── filters.rst │ ├── index.rst │ ├── request_builders.rst │ ├── responses.rst │ └── types.rst ├── conf.py ├── examples │ ├── basic_queries.rst │ ├── index.rst │ └── logging.rst ├── index.rst ├── make.bat └── requirements.txt ├── infra ├── docker-compose.yaml └── init.sql ├── poetry.lock ├── postgrest ├── __init__.py ├── _async │ ├── __init__.py │ ├── client.py │ └── request_builder.py ├── _sync │ ├── __init__.py │ ├── client.py │ └── request_builder.py ├── base_client.py ├── base_request_builder.py ├── constants.py ├── deprecated_client.py ├── deprecated_get_request_builder.py ├── exceptions.py ├── py.typed ├── types.py ├── utils.py └── version.py ├── pyproject.toml ├── release-please-config.json └── tests ├── __init__.py ├── _async ├── __init__.py ├── client.py ├── test_client.py ├── test_filter_request_builder.py ├── test_filter_request_builder_integration.py ├── test_query_request_builder.py └── test_request_builder.py ├── _sync ├── __init__.py ├── client.py ├── test_client.py ├── test_filter_request_builder.py ├── test_filter_request_builder_integration.py ├── test_query_request_builder.py └── test_request_builder.py └── test_utils.py /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.203.0/containers/python-3/.devcontainer/base.Dockerfile 2 | 3 | # [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 
3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster 4 | ARG VARIANT="3.10-bullseye" 5 | FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} 6 | 7 | # [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 8 | ARG NODE_VERSION="none" 9 | RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi 10 | 11 | # [Optional] If your pip requirements rarely change, uncomment this section to add them to the image. 12 | # COPY requirements.txt /tmp/pip-tmp/ 13 | # RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ 14 | # && rm -rf /tmp/pip-tmp 15 | 16 | # [Optional] Uncomment this section to install additional OS packages. 17 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 18 | # && apt-get -y install --no-install-recommends 19 | 20 | # [Optional] Uncomment this line to install global node packages. 21 | # RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 22 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.203.0/containers/python-3 3 | { 4 | "name": "Python 3", 5 | "runArgs": [ 6 | "--init" 7 | ], 8 | "build": { 9 | "dockerfile": "Dockerfile", 10 | "context": "..", 11 | "args": { 12 | // Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6 13 | // Append -bullseye or -buster to pin to an OS version. 14 | // Use -bullseye variants on local on arm64/Apple Silicon. 
15 | "VARIANT": "3.10-bullseye", 16 | // Options 17 | "NODE_VERSION": "lts/*" 18 | } 19 | }, 20 | // Set *default* container specific settings.json values on container create. 21 | "settings": { 22 | "python.pythonPath": "/usr/local/bin/python", 23 | "python.languageServer": "Pylance", 24 | "python.linting.enabled": true, 25 | "python.linting.flake8Enabled": true, 26 | "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", 27 | "python.formatting.blackPath": "/usr/local/py-utils/bin/black", 28 | "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", 29 | "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", 30 | "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", 31 | "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", 32 | "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", 33 | "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", 34 | "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint", 35 | "python.analysis.diagnosticMode": "workspace", 36 | "files.exclude": { 37 | "**/.ipynb_checkpoints": true, 38 | "**/.pytest_cache": true, 39 | "**/*pycache*": true 40 | }, 41 | "python.formatting.provider": "black", 42 | "python.linting.flake8Args": [ 43 | "--max-line-length=88", 44 | "--extend-ignore=E203" 45 | ], 46 | "editor.formatOnSave": true, 47 | "editor.codeActionsOnSave": { 48 | "source.organizeImports": true 49 | }, 50 | "python.sortImports.args": [ 51 | "--multi-line=3", 52 | "--trailing-comma", 53 | "--force-grid-wrap=0", 54 | "--use-parentheses", 55 | "--line-width=88", 56 | ], 57 | "markdownlint.config": { 58 | "MD022": false, 59 | "MD024": false, 60 | "MD032": false, 61 | "MD033": false 62 | } 63 | }, 64 | // Add the IDs of extensions you want installed when the container is created. 
65 | "extensions": [ 66 | "ms-python.python", 67 | "ms-python.vscode-pylance", 68 | "ms-azuretools.vscode-docker", 69 | "donjayamanne.githistory", 70 | "felipecaputo.git-project-manager", 71 | "github.copilot-nightly", 72 | "eamodio.gitlens", 73 | "davidanson.vscode-markdownlint" 74 | ], 75 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 76 | // "forwardPorts": [], 77 | // Use 'postCreateCommand' to run commands after the container is created. 78 | // "postCreateCommand": "pip3 install --user -r requirements.txt", 79 | // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 80 | "remoteUser": "vscode", 81 | "features": { 82 | "docker-in-docker": "latest", 83 | "git": "latest", 84 | "git-lfs": "latest", 85 | "github-cli": "latest" 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @silentworks 2 | * @grdsdev 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | target-branch: "main" 8 | - package-ecosystem: "pip" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | target-branch: "main" 13 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | workflow_dispatch: 9 | 10 | jobs: 11 | test: 12 | name: Test / OS ${{ matrix.os }} / Python ${{ matrix.python-version }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest] 16 | python-version: ["3.9", 
"3.10", "3.11", "3.12", "3.13"] 17 | runs-on: ${{ matrix.os }} 18 | steps: 19 | - name: Clone Repository 20 | uses: actions/checkout@v4 21 | 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | 27 | - name: Set up Poetry 28 | run: pipx install poetry==1.8.5 --python python${{ matrix.python-version }} 29 | 30 | - name: Run Tests 31 | run: make run_tests 32 | 33 | - name: Upload coverage to Coveralls 34 | uses: coverallsapp/github-action@v2 35 | with: 36 | github-token: ${{ secrets.GITHUB_TOKEN }} 37 | flag-name: run-${{ join(matrix.*, '-') }} 38 | parallel: true 39 | 40 | finish_tests: 41 | needs: test 42 | name: Upload tests coveralls results 43 | if: ${{ always() }} 44 | runs-on: ubuntu-latest 45 | steps: 46 | - name: Coveralls Finished 47 | uses: coverallsapp/github-action@v2 48 | with: 49 | github-token: ${{ secrets.GITHUB_TOKEN }} 50 | parallel-finished: true 51 | carryforward: "run-ubuntu-latest-3.9,run-ubuntu-latest-3.10,run-ubuntu-latest-3.11,run-ubuntu-latest-3.12,run-ubuntu-latest-3.13" 52 | 53 | release-please: 54 | needs: test 55 | if: ${{ github.ref == 'refs/heads/main' && github.event_name == 'push' && github.repository_owner == 'supabase' }} 56 | runs-on: ubuntu-latest 57 | name: "Bump version and create changelog" 58 | permissions: 59 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing 60 | contents: write # needed for github actions bot to write to repo 61 | pull-requests: write 62 | steps: 63 | - uses: googleapis/release-please-action@v4 64 | id: release 65 | with: 66 | target-branch: ${{ github.ref_name }} 67 | publish: 68 | needs: release-please 69 | if: ${{ startsWith(github.event.head_commit.message, 'chore(main)') && github.ref == 'refs/heads/main' && github.event_name == 'push' && github.repository_owner == 'supabase' }} 70 | runs-on: ubuntu-latest 71 | name: "Publish to PyPi" 72 | environment: 73 | name: pypi 
74 | url: https://pypi.org/p/postgrest 75 | permissions: 76 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing 77 | contents: write # needed for github actions bot to write to repo 78 | steps: 79 | - name: Set up Python 3.11 80 | uses: actions/setup-python@v5 81 | with: 82 | python-version: 3.11 83 | 84 | - name: Set up Poetry 85 | run: pipx install poetry==1.8.5 --python python3.11 86 | 87 | - uses: actions/checkout@v4 88 | with: 89 | fetch-depth: 0 90 | 91 | - name: Install dependencies 92 | run: poetry install 93 | 94 | - name: Build package distribution directory 95 | id: build_dist 96 | run: poetry build 97 | 98 | - name: Publish package distributions to PyPI 99 | uses: pypa/gh-action-pypi-publish@release/v1 100 | -------------------------------------------------------------------------------- /.github/workflows/conventional-commits-lint.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const fs = require("fs"); 4 | 5 | const TITLE_PATTERN = 6 | /^(?<prefix>[^:!(]+)(?<scope>\([^)]+\))?(?<breaking>[!])?:.+$/; 7 | const RELEASE_AS_DIRECTIVE = /^\s*Release-As:/im; 8 | const BREAKING_CHANGE_DIRECTIVE = /^\s*BREAKING[ \t]+CHANGE:/im; 9 | 10 | const ALLOWED_CONVENTIONAL_COMMIT_PREFIXES = [ 11 | "revert", 12 | "feat", 13 | "fix", 14 | "ci", 15 | "docs", 16 | "chore", 17 | "style", 18 | "test", 19 | "refactor", 20 | ]; 21 | 22 | const object = process.argv[2]; 23 | const payload = JSON.parse(fs.readFileSync(process.stdin.fd, "utf-8")); 24 | 25 | let validate = []; 26 | 27 | if (object === "pr") { 28 | validate.push({ 29 | title: payload.pull_request.title, 30 | content: payload.pull_request.body, 31 | }); 32 | } else if (object === "push") { 33 | validate.push( 34 | ...payload.commits 35 | .map((commit) => ({ 36 | title: commit.message.split("\n")[0], 37 | content: commit.message, 38 | })) 39 | .filter(({ title }) => !title.startsWith("Merge branch ") && !title.startsWith("Revert ")), 40 | ); 41 | }
else { 42 | console.error( 43 | `Unknown object for first argument "${object}", use 'pr' or 'push'.`, 44 | ); 45 | process.exit(0); 46 | } 47 | 48 | let failed = false; 49 | 50 | validate.forEach((payload) => { 51 | if (payload.title) { 52 | const match = payload.title.match(TITLE_PATTERN); 53 | if (!match) { 54 | return 55 | } 56 | 57 | const { groups } = match 58 | 59 | if (groups) { 60 | if ( 61 | !ALLOWED_CONVENTIONAL_COMMIT_PREFIXES.find( 62 | (prefix) => prefix === groups.prefix, 63 | ) 64 | ) { 65 | console.error( 66 | `PR (or a commit in it) is using a disallowed conventional commit prefix ("${groups.prefix}"). Only ${ALLOWED_CONVENTIONAL_COMMIT_PREFIXES.join(", ")} are allowed. Make sure the prefix is lowercase!`, 67 | ); 68 | failed = true; 69 | } 70 | } else { 71 | console.error( 72 | "PR or commit title must match conventional commit structure.", 73 | ); 74 | failed = true; 75 | } 76 | } 77 | 78 | if (payload.content) { 79 | if (payload.content.match(RELEASE_AS_DIRECTIVE)) { 80 | console.error( 81 | "PR descriptions or commit messages must not contain Release-As conventional commit directives.", 82 | ); 83 | failed = true; 84 | } 85 | } 86 | }); 87 | 88 | if (failed) { 89 | process.exit(1); 90 | } 91 | 92 | process.exit(0); 93 | -------------------------------------------------------------------------------- /.github/workflows/conventional-commits.yml: -------------------------------------------------------------------------------- 1 | name: Check pull requests 2 | 3 | on: 4 | push: 5 | branches-ignore: # Run the checks on all branches but the protected ones 6 | - main 7 | - release/* 8 | 9 | pull_request_target: 10 | branches: 11 | - main 12 | - release/* 13 | types: 14 | - opened 15 | - edited 16 | - reopened 17 | - ready_for_review 18 | 19 | jobs: 20 | check-conventional-commits: 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v4 25 | with: 26 | sparse-checkout: | 27 | .github 28 | 29 | - if: ${{ github.event_name == 
'pull_request_target' }} 30 | run: | 31 | set -ex 32 | 33 | node .github/workflows/conventional-commits-lint.js pr < 19 | This issue is stale because it has been open for 365 days with no activity. 20 | stale-pr-message: > 21 | This pull request is stale because it has been open for 365 days with no activity. 22 | close-issue-message: > 23 | This issue has been marked as stale and closed due to inactivity. 24 | close-pr-message: > 25 | This pull request has been marked as stale and closed due to inactivity. 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # PyCharm 132 | .idea/ 133 | 134 | # Visual Studio Code 135 | .vscode/ 136 | 137 | # Poetry local config 138 | poetry.toml 139 | 140 | # MacOS annoyance 141 | .DS_Store 142 | **/.DS_Store 143 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: '^.*\.(md|MD)$' 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v5.0.0 5 | hooks: 6 | - id: trailing-whitespace 7 | - id: check-added-large-files 8 | - id: end-of-file-fixer 9 | - id: mixed-line-ending 10 | args: ["--fix=lf"] 11 | 12 | - repo: https://github.com/pycqa/isort 13 | rev: "6.0.0" 14 | hooks: 15 | - id: isort 16 | args: 17 | [ 18 | "--profile", 19 | "black", 20 | "--multi-line=3", 21 | "--trailing-comma", 22 | "--force-grid-wrap=0", 23 | "--use-parentheses", 24 | "--line-width=88", 25 | ] 26 | 27 | - repo: https://github.com/PyCQA/autoflake.git 28 | rev: v2.3.1 29 | hooks: 30 | - id: autoflake 31 | args: 32 | [ 33 | "--in-place", 34 | "--remove-all-unused-imports", 35 | "--ignore-init-module-imports", 36 | ] 37 | 38 | - repo: https://github.com/psf/black 39 | rev: "24.4.0" 40 | hooks: 41 | - id: black 42 | args: [--line-length, "90"] 43 | 44 | - repo: https://github.com/asottile/pyupgrade 45 | rev: v3.15.2 46 | hooks: 47 | - id: pyupgrade 48 | args: 
["--py37-plus", "--keep-runtime-typing"] 49 | 50 | - repo: https://github.com/commitizen-tools/commitizen 51 | rev: v3.22.0 52 | hooks: 53 | - id: commitizen 54 | stages: [commit-msg] 55 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-20.04 5 | tools: 6 | python: "3.9" 7 | 8 | python: 9 | install: 10 | - requirements: docs/requirements.txt 11 | 12 | 13 | - method: pip 14 | path: . 15 | -------------------------------------------------------------------------------- /.release-please-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | ".": "1.0.2" 3 | } 4 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. 
Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at [INSERT EMAIL ADDRESS]. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at 72 | <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html> 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | <https://www.contributor-covenant.org/faq> 77 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | We highly appreciate feedback and contributions from the community! If you'd like to contribute to this project, please make sure to review and follow the guidelines below. 4 | 5 | ## Code of conduct 6 | 7 | In the interest of fostering an open and welcoming environment, please review and follow our [code of conduct](./CODE_OF_CONDUCT.md). 8 | 9 | ## Code and copy reviews 10 | 11 | All submissions, including submissions by project members, require review. We 12 | use GitHub pull requests for this purpose. Consult 13 | [GitHub Help](https://help.github.com/articles/about-pull-requests) for more 14 | information on using pull requests.
15 | 16 | ## Report an issue 17 | 18 | Report all issues through [GitHub Issues](./issues). 19 | 20 | ## File a feature request 21 | 22 | File your feature request through [GitHub Issues](./issues). 23 | 24 | ## Create a pull request 25 | 26 | When making pull requests to the repository, make sure to follow these guidelines for both bug fixes and new features: 27 | 28 | - Before creating a pull request, file a GitHub Issue so that maintainers and the community can discuss the problem and potential solutions before you spend time on an implementation. 29 | - In your PR's description, link to any related issues or pull requests to give reviewers the full context of your change. 30 | - For commit messages, follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0) format. 31 | - For example, if you update documentation for a specific extension, your commit message might be: `docs(extension-name): updated installation documentation`. 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Supabase 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software.
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | install: 2 | poetry install 3 | 4 | install_poetry: 5 | curl -sSL https://install.python-poetry.org | python - 6 | 7 | tests: install tests_only tests_pre_commit 8 | 9 | tests_pre_commit: 10 | poetry run pre-commit run --all-files 11 | 12 | tests_only: 13 | poetry run pytest --cov=./ --cov-report=xml -vv 14 | 15 | run_infra: 16 | cd infra &&\ 17 | docker compose down &&\ 18 | docker compose up -d 19 | 20 | clean_infra: 21 | cd infra &&\ 22 | docker compose down --remove-orphans &&\ 23 | docker system prune -a --volumes -f 24 | 25 | stop_infra: 26 | cd infra &&\ 27 | docker compose down --remove-orphans 28 | 29 | run_tests: run_infra sleep tests 30 | 31 | run_unasync: 32 | poetry run unasync postgrest tests 33 | 34 | build_sync: run_unasync remove_pytest_asyncio_from_sync 35 | 36 | remove_pytest_asyncio_from_sync: 37 | sed -i 's/@pytest.mark.asyncio//g' tests/_sync/test_client.py 38 | sed -i 's/_async/_sync/g' tests/_sync/test_client.py 39 | sed -i 's/Async/Sync/g' tests/_sync/test_client.py 40 | sed -i 's/_client\.SyncClient/_client\.Client/g' tests/_sync/test_client.py 41 | 42 | sleep: 43 | sleep 2 44 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # postgrest-py 2 
| 3 | [PostgREST](https://postgrest.org) client for Python. This library provides an "ORM-like" interface to PostgREST. 4 | 5 | ## INSTALLATION 6 | 7 | ### Requirements 8 | 9 | - Python >= 3.9 10 | - PostgreSQL >= 13 11 | - PostgREST >= 11 12 | 13 | ### Local PostgREST server 14 | 15 | If you want to use a local PostgREST server for development, you can use our preconfigured instance via Docker Compose. 16 | 17 | ```sh 18 | docker-compose up 19 | ``` 20 | 21 | Once Docker Compose started, PostgREST is accessible at <http://localhost:3000>. 22 | 23 | ### Instructions 24 | 25 | #### With Poetry (recommended) 26 | 27 | ```sh 28 | poetry add postgrest 29 | ``` 30 | 31 | #### With Pip 32 | 33 | ```sh 34 | pip install postgrest 35 | ``` 36 | 37 | ## USAGE 38 | 39 | ### Getting started 40 | 41 | ```py 42 | import asyncio 43 | from postgrest import AsyncPostgrestClient 44 | 45 | async def main(): 46 | async with AsyncPostgrestClient("http://localhost:3000") as client: 47 | r = await client.from_("countries").select("*").execute() 48 | countries = r.data 49 | 50 | asyncio.run(main()) 51 | ``` 52 | 53 | ### Create 54 | 55 | ```py 56 | await client.from_("countries").insert({ "name": "Việt Nam", "capital": "Hà Nội" }).execute() 57 | ``` 58 | 59 | ### Read 60 | 61 | ```py 62 | r = await client.from_("countries").select("id", "name").execute() 63 | countries = r.data 64 | ``` 65 | 66 | ### Update 67 | 68 | ```py 69 | await client.from_("countries").update({"capital": "Hà Nội"}).eq("name", "Việt Nam").execute() 70 | ``` 71 | 72 | ### Delete 73 | 74 | ```py 75 | await client.from_("countries").delete().eq("name", "Việt Nam").execute() 76 | ``` 77 | 78 | ### General filters 79 | 80 | ### Stored procedures (RPC) 81 | ```py 82 | await client.rpc("foobar", {"arg1": "value1", "arg2": "value2"}).execute() 83 | ``` 84 | 85 | ## DEVELOPMENT 86 | 87 | ```sh 88 | git clone https://github.com/supabase/postgrest-py.git 89 | cd postgrest-py 90 | poetry install 91 | poetry run pre-commit install 92 | ``` 93 | 94 | 
### Testing 95 | 96 | ```sh 97 | poetry run pytest 98 | ``` 99 | 100 | ## CHANGELOG 101 | 102 | Read more [here](https://github.com/supabase/postgrest-py/blob/main/CHANGELOG.md). 103 | 104 | ## SPONSORS 105 | 106 | We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don’t exist we build them and open source them ourselves. Thanks to these sponsors who are making the OSS ecosystem better for everyone. 107 | 108 | [![Worklife VC](https://user-images.githubusercontent.com/10214025/90451355-34d71200-e11e-11ea-81f9-1592fd1e9146.png)](https://www.worklife.vc) 109 | [![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) 110 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/api/client.rst: -------------------------------------------------------------------------------- 1 | Client 2 | ====== 3 | 4 | To run any queries, the first step is to construct a client. 
5 | 6 | The library offers both synchronous and asynchronous clients. 7 | 8 | .. autoclass:: postgrest.AsyncPostgrestClient 9 | :members: 10 | :inherited-members: 11 | 12 | .. autoclass:: postgrest.SyncPostgrestClient 13 | :members: 14 | :inherited-members: 15 | -------------------------------------------------------------------------------- /docs/api/exceptions.rst: -------------------------------------------------------------------------------- 1 | Exceptions 2 | ========== 3 | 4 | .. autoexception:: postgrest.APIError 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/api/filters.rst: -------------------------------------------------------------------------------- 1 | Filter Builder 2 | ============== 3 | 4 | This is a kind of `request builder `_. It contains all the methods used to 5 | filter data during queries. 6 | 7 | .. note:: 8 | In the source code, there are separate AsyncFilterRequestBuilders and SyncFilterRequestBuilders. 9 | These classes are otherwise exactly the same, and provide the same interface. 10 | 11 | .. warning:: 12 | These classes are not meant to be constructed by the user. 13 | 14 | .. tip:: 15 | The full list of supported filter operators are on the `PostgREST documentation `_ 16 | 17 | .. tip:: 18 | All the filter methods return a modified instance of the filter builder, allowing fluent chaining of filters. 19 | 20 | 21 | .. autoclass:: postgrest.AsyncFilterRequestBuilder 22 | :members: 23 | :undoc-members: 24 | :inherited-members: 25 | :member-order: bysource 26 | 27 | .. 
autoclass:: postgrest.SyncFilterRequestBuilder 28 | :members: 29 | :undoc-members: 30 | :inherited-members: 31 | :member-order: bysource 32 | -------------------------------------------------------------------------------- /docs/api/index.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | The library offers both synchronous and asynchronous clients. 5 | Note that the synchronous and asynchronous classes all provide the exact same interface. 6 | 7 | .. toctree:: 8 | :maxdepth: 3 9 | :caption: Contents: 10 | 11 | Client 12 | Request Builders 13 | Filters 14 | Responses 15 | Types 16 | Exceptions 17 | -------------------------------------------------------------------------------- /docs/api/request_builders.rst: -------------------------------------------------------------------------------- 1 | Request Builders 2 | ================ 3 | 4 | .. note:: 5 | In the source code, there are separate synchronous and asynchronous request builder classes. 6 | These classes are otherwise exactly the same, and provide the same interfaces. 7 | 8 | .. warning:: 9 | These classes are not meant to be constructed by the user. 10 | 11 | .. autoclass:: postgrest.AsyncRequestBuilder 12 | :members: 13 | :inherited-members: 14 | 15 | .. autoclass:: postgrest.AsyncSelectRequestBuilder 16 | :members: 17 | :inherited-members: 18 | 19 | .. autoclass:: postgrest.AsyncQueryRequestBuilder 20 | :members: 21 | :inherited-members: 22 | 23 | .. autoclass:: postgrest.SyncRequestBuilder 24 | :members: 25 | :inherited-members: 26 | 27 | .. autoclass:: postgrest.SyncSelectRequestBuilder 28 | :members: 29 | :inherited-members: 30 | 31 | .. 
autoclass:: postgrest.SyncQueryRequestBuilder 32 | :members: 33 | :inherited-members: 34 | -------------------------------------------------------------------------------- /docs/api/responses.rst: -------------------------------------------------------------------------------- 1 | Responses 2 | ========= 3 | 4 | Once a query is run, the library parses the server's response into an APIResponse object. 5 | 6 | .. autoclass:: postgrest.APIResponse 7 | :members: 8 | -------------------------------------------------------------------------------- /docs/api/types.rst: -------------------------------------------------------------------------------- 1 | Types 2 | ===== 3 | 4 | Some type aliases and enums used in the library. 5 | 6 | .. autoclass:: postgrest.types.CountMethod 7 | :members: 8 | 9 | .. autoclass:: postgrest.types.Filters 10 | :members: 11 | 12 | .. autoclass:: postgrest.types.RequestMethod 13 | :members: 14 | 15 | .. autoclass:: postgrest.types.ReturnMethod 16 | :members: 17 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | import postgrest 20 | 21 | project = "postgrest-py" 22 | version = postgrest.__version__ 23 | release = version 24 | copyright = ( 25 | "2022, Anand Krishna, Daniel Reinón García, Joel Lee, Leynier Gutiérrez González" 26 | ) 27 | author = "Anand Krishna, Daniel Reinón García, Joel Lee, Leynier Gutiérrez González" 28 | 29 | 30 | # -- General configuration --------------------------------------------------- 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | "sphinx.ext.autodoc", 37 | "sphinx.ext.napoleon", 38 | "sphinx.ext.extlinks", 39 | ] 40 | 41 | # Napolean config 42 | napoleon_google_docstring = True 43 | 44 | # autodoc config 45 | autodoc_member_order = "bysource" 46 | autodoc_class_signature = "separated" 47 | 48 | # Add any paths that contain templates here, relative to this directory. 49 | templates_path = ["_templates"] 50 | 51 | # List of patterns, relative to source directory, that match files and 52 | # directories to ignore when looking for source files. 53 | # This pattern also affects html_static_path and html_extra_path. 54 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 55 | 56 | 57 | # -- Options for HTML output ------------------------------------------------- 58 | 59 | # The theme to use for HTML and HTML Help pages. See the documentation for 60 | # a list of builtin themes. 61 | # 62 | html_theme = "furo" 63 | 64 | # Add any paths that contain custom static files (such as style sheets) here, 65 | # relative to this directory. They are copied after the builtin static files, 66 | # so a file named "default.css" will overwrite the builtin "default.css". 
67 | html_static_path = [] 68 | -------------------------------------------------------------------------------- /docs/examples/basic_queries.rst: -------------------------------------------------------------------------------- 1 | Getting Started 2 | =============== 3 | 4 | We connect to the API and authenticate, and fetch some data. 5 | 6 | .. code-block:: python 7 | :linenos: 8 | 9 | import asyncio 10 | from postgrest import AsyncPostgrestClient 11 | 12 | async def main(): 13 | async with AsyncPostgrestClient("http://localhost:3000") as client: 14 | client.auth("Bearer ") 15 | r = await client.from_("countries").select("*").execute() 16 | countries = r.data 17 | 18 | asyncio.run(main()) 19 | 20 | 21 | **CRUD** 22 | 23 | .. code-block:: python 24 | 25 | await client.from_("countries").insert({ "name": "Việt Nam", "capital": "Hà Nội" }).execute() 26 | 27 | 28 | .. code-block:: python 29 | 30 | r = await client.from_("countries").select("id", "name").execute() 31 | countries = r.data 32 | 33 | 34 | .. code-block:: python 35 | 36 | await client.from_("countries").update({"capital": "Hà Nội"}).eq("name", "Việt Nam").execute() 37 | 38 | .. code-block:: python 39 | 40 | await client.from_("countries").delete().eq("name", "Việt Nam").execute() 41 | 42 | **Calling RPCs** 43 | 44 | .. code-block:: python 45 | 46 | await client.rpc("foo").execute() 47 | 48 | .. code-block:: python 49 | 50 | await client.rpc("bar", {"arg1": "value1", "arg2": "value2"}).execute() 51 | 52 | 53 | **Closing the connection** 54 | 55 | Once you have finished running your queries, close the connection: 56 | 57 | .. code-block:: python 58 | 59 | await client.aclose() 60 | 61 | 62 | You can also use the client with a context manager, which will close the client for you. 63 | 64 | .. 
code-block:: python 65 | 66 | async with AsyncPostgrestClient("url") as client: 67 | # run queries 68 | # the client is closed when the async with block ends 69 | -------------------------------------------------------------------------------- /docs/examples/index.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ======== 3 | 4 | .. note:: 5 | The library offers both synchronous and asynchronous clients. In the examples, we use the 6 | async client. However, they should work the same for the sync client as well. 7 | 8 | 9 | .. toctree:: 10 | :maxdepth: 1 11 | :caption: More examples: 12 | 13 | Basic Queries 14 | Logging Requests 15 | -------------------------------------------------------------------------------- /docs/examples/logging.rst: -------------------------------------------------------------------------------- 1 | Logging Requests 2 | ================ 3 | 4 | While debugging, you might want to see the API requests that are being sent for every query. 5 | To do this, just set the logging level to "DEBUG": 6 | 7 | .. code-block:: python 8 | :linenos: 9 | 10 | from logging import basicConfig, DEBUG 11 | from postgrest import SyncPostgrestClient 12 | 13 | basicConfig(level=DEBUG) 14 | 15 | client = SyncPostgrestClient(...) 16 | 17 | client.from_("test").select("*").eq("a", "b").execute() 18 | client.from_("test").select("*").eq("foo", "bar").eq("baz", "spam").execute() 19 | 20 | Output: 21 | 22 | .. code-block:: 23 | 24 | DEBUG:httpx._client:HTTP Request: GET https:///rest/v1/test?select=%2A&a=eq.b "HTTP/1.1 200 OK" 25 | DEBUG:httpx._client:HTTP Request: GET https:///rest/v1/test?select=%2A&foo=eq.bar&baz=eq.spam "HTTP/1.1 200 OK" 26 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to postgrest-py's documentation! 
2 | ======================================== 3 | 4 | `PostgREST `_ client library for Python. This library provides an ORM interface to PostgREST. 5 | 6 | .. attention:: 7 | This library is currently unstable. If you find any bugs, please file an `issue `_. 8 | 9 | Installation 10 | ============ 11 | Requirements: 12 | 13 | - Python >= 3.7 14 | 15 | **With pip:** 16 | :: 17 | 18 | pip install postgrest-py 19 | 20 | **With poetry:** 21 | :: 22 | 23 | poetry add postgrest-py 24 | 25 | 26 | .. toctree:: 27 | :maxdepth: 2 28 | :caption: Contents: 29 | 30 | API Reference 31 | Examples 32 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | furo >= 2022.4.7 2 | Sphinx == 7.4.7 3 | -------------------------------------------------------------------------------- /infra/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # docker-compose.yml 2 | version: '3' 3 | services: 4 | rest: 5 | image: postgrest/postgrest:v11.2.2 6 | ports: 7 | - '3000:3000' 8 | environment: 9 | PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres 10 | PGRST_DB_SCHEMAS: public,personal 11 | PGRST_DB_EXTRA_SEARCH_PATH: extensions 12 | PGRST_DB_ANON_ROLE: postgres 13 | PGRST_DB_PLAN_ENABLED: 1 14 | PGRST_DB_TX_END: commit-allow-override 15 | depends_on: 16 | - db 17 | db: 18 | image: supabase/postgres:15.1.0.37 19 | ports: 20 | - '5432:5432' 21 | volumes: 22 | - .:/docker-entrypoint-initdb.d/ 23 | environment: 24 | POSTGRES_DB: postgres 25 | POSTGRES_USER: postgres 26 | POSTGRES_PASSWORD: postgres 27 | POSTGRES_HOST: /var/run/postgresql 28 | POSTGRES_PORT: 5432 29 | -------------------------------------------------------------------------------- /infra/init.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE public.countries ( 2 | id int8 PRIMARY KEY, 3 | iso CHAR (2) NOT NULL, 4 | country_name VARCHAR (80) NOT NULL, 5 | nicename VARCHAR (80) NOT NULL, 6 | iso3 CHAR (3) DEFAULT NULL, 7 | numcode SMALLINT DEFAULT NULL, 8 | phonecode INT NOT NULL 9 | ); 10 | 11 | INSERT INTO public.countries (id, iso, country_name, nicename, 
iso3, numcode, phonecode) VALUES 12 | (1, 'AF', 'AFGHANISTAN', 'Afghanistan', 'AFG', 4, 93), 13 | (2, 'AL', 'ALBANIA', 'Albania', 'ALB', 8, 355), 14 | (3, 'DZ', 'ALGERIA', 'Algeria', 'DZA', 12, 213), 15 | (4, 'AQ', 'ANTARCTICA', 'Antarctica', NULL, NULL, 0), 16 | (5, 'CR', 'COSTA RICA', 'Costa Rica', 'CRI', 188, 506), 17 | (6, 'ES', 'SPAIN', 'Spain', 'ESP', 724, 34), 18 | (7, 'TH', 'THAILAND', 'Thailand', 'THA', 764, 66), 19 | (8, 'TG', 'TOGO', 'Togo', 'TGO', 768, 228), 20 | (9, 'TT', 'TRINIDAD AND TOBAGO', 'Trinidad and Tobago', 'TTO', 780, 1868), 21 | (10, 'GB', 'UNITED KINGDOM', 'United Kingdom', 'GBR', 826, 44), 22 | (11, 'US', 'UNITED STATES', 'United States', 'USA', 840, 1), 23 | (12, 'ZW', 'ZIMBABWE', 'Zimbabwe', 'ZWE', 716, 263); 24 | 25 | create table public.cities ( 26 | id int8 primary key, 27 | country_id int8 not null references public.countries, 28 | name text 29 | ); 30 | 31 | insert into public.cities (id, name, country_id) values 32 | (1, 'London', 10), 33 | (2, 'Manchester', 10), 34 | (3, 'Liverpool', 10), 35 | (4, 'Bristol', 10), 36 | (5, 'Miami', 11), 37 | (6, 'Huston', 11), 38 | (7, 'Atlanta', 11); 39 | 40 | create table public.users ( 41 | id int8 primary key, 42 | name text, 43 | address jsonb 44 | ); 45 | 46 | insert into public.users (id, name, address) values 47 | (1, 'Michael', '{ "postcode": 90210, "street": "Melrose Place" }'), 48 | (2, 'Jane', '{}'); 49 | 50 | create table public.reservations ( 51 | id int8 primary key, 52 | room_name text, 53 | during tsrange 54 | ); 55 | 56 | insert into public.reservations (id, room_name, during) values 57 | (1, 'Emerald', '[2000-01-01 13:00, 2000-01-01 15:00)'), 58 | (2, 'Topaz', '[2000-01-02 09:00, 2000-01-02 10:00)'); 59 | 60 | 61 | create table public.issues ( 62 | id int8 primary key, 63 | title text, 64 | tags text[] 65 | ); 66 | 67 | insert into public.issues (id, title, tags) values 68 | (1, 'Cache invalidation is not working', array['is:open', 'severity:high', 'priority:low']), 69 | (2, 
'Use better names', array['is:open', 'severity:low', 'priority:medium']), 70 | (3, 'Add missing postgrest filters', array['is:open', 'severity:low', 'priority:high']), 71 | (4, 'Add alias to filters', array['is:closed', 'severity:low', 'priority:medium']); 72 | 73 | create or replace function public.list_stored_countries() 74 | returns setof countries 75 | language sql 76 | as $function$ 77 | select * from countries; 78 | $function$; 79 | 80 | create or replace function public.search_countries_by_name(search_name text) 81 | returns setof countries 82 | language sql 83 | as $function$ 84 | select * from countries where nicename ilike '%' || search_name || '%'; 85 | $function$; 86 | 87 | create table 88 | orchestral_sections (id int8 primary key, name text); 89 | create table 90 | instruments ( 91 | id int8 primary key, 92 | section_id int8 not null references orchestral_sections, 93 | name text 94 | ); 95 | 96 | insert into 97 | orchestral_sections (id, name) 98 | values 99 | (1, 'strings'), 100 | (2, 'woodwinds'); 101 | insert into 102 | instruments (id, section_id, name) 103 | values 104 | (1, 1, 'harp'), 105 | (2, 1, 'violin'); 106 | -------------------------------------------------------------------------------- /postgrest/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from httpx import Timeout 4 | 5 | from ._async.client import AsyncPostgrestClient 6 | from ._async.request_builder import ( 7 | AsyncFilterRequestBuilder, 8 | AsyncMaybeSingleRequestBuilder, 9 | AsyncQueryRequestBuilder, 10 | AsyncRequestBuilder, 11 | AsyncRPCFilterRequestBuilder, 12 | AsyncSelectRequestBuilder, 13 | AsyncSingleRequestBuilder, 14 | ) 15 | from ._sync.client import SyncPostgrestClient 16 | from ._sync.request_builder import ( 17 | SyncFilterRequestBuilder, 18 | SyncMaybeSingleRequestBuilder, 19 | SyncQueryRequestBuilder, 20 | SyncRequestBuilder, 21 | SyncRPCFilterRequestBuilder, 22 | 
class AsyncPostgrestClient(BasePostgrestClient):
    """Asynchronous PostgREST client.

    Wraps an httpx ``AsyncClient`` (created by :meth:`create_session`) and
    exposes table and RPC entry points that return request builders for
    fluent query construction.
    """

    def __init__(
        self,
        base_url: str,
        *,
        schema: str = "public",
        headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS,
        timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT,
        verify: bool = True,
        proxy: Optional[str] = None,
    ) -> None:
        BasePostgrestClient.__init__(
            self,
            base_url,
            schema=schema,
            headers=headers,
            timeout=timeout,
            verify=verify,
            proxy=proxy,
        )
        # The base class builds the session via create_session(); narrow the
        # static type to AsyncClient for this async subclass.
        self.session = cast(AsyncClient, self.session)

    def create_session(
        self,
        base_url: str,
        headers: Dict[str, str],
        timeout: Union[int, float, Timeout],
        verify: bool = True,
        proxy: Optional[str] = None,
    ) -> AsyncClient:
        """Build the underlying httpx async session."""
        return AsyncClient(
            base_url=base_url,
            headers=headers,
            timeout=timeout,
            verify=verify,
            proxy=proxy,
            follow_redirects=True,
            http2=True,
        )

    def schema(self, schema: str):
        """Switch to another schema by returning a new client instance."""
        return AsyncPostgrestClient(
            base_url=self.base_url,
            schema=schema,
            headers=self.headers,
            timeout=self.timeout,
            verify=self.verify,
            proxy=self.proxy,
        )

    async def __aenter__(self) -> AsyncPostgrestClient:
        return self

    async def __aexit__(self, exc_type, exc, tb) -> None:
        await self.aclose()

    async def aclose(self) -> None:
        """Close the underlying HTTP connections."""
        await self.session.aclose()

    def from_(self, table: str) -> AsyncRequestBuilder[_TableT]:
        """Perform a table operation.

        Args:
            table: The name of the table
        Returns:
            :class:`AsyncRequestBuilder`
        """
        return AsyncRequestBuilder[_TableT](self.session, f"/{table}")

    def table(self, table: str) -> AsyncRequestBuilder[_TableT]:
        """Alias to :meth:`from_`."""
        return self.from_(table)

    @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead")
    def from_table(self, table: str) -> AsyncRequestBuilder:
        """Alias to :meth:`from_`."""
        return self.from_(table)

    def rpc(
        self,
        func: str,
        params: Optional[dict] = None,
        count: Optional[CountMethod] = None,
        head: bool = False,
        get: bool = False,
    ) -> AsyncRPCFilterRequestBuilder[Any]:
        """Perform a stored procedure call.

        Args:
            func: The name of the remote procedure to run.
            params: The parameters to be passed to the remote procedure.
                Optional; defaults to no parameters so parameterless procedures
                can be called as ``client.rpc("foo")`` (as shown in the docs).
                ``None`` is used instead of a mutable ``{}`` default.
            count: The method to use to get the count of rows returned.
            head: When set to `true`, `data` will not be returned. Useful if you only need the count.
            get: When set to `true`, the function will be called with read-only access mode.
        Returns:
            :class:`AsyncRPCFilterRequestBuilder`
        Example:
            .. code-block:: python

                await client.rpc("foobar", {"arg": "value"}).execute()

        .. versionchanged:: 0.10.9
            This method now returns a :class:`AsyncRPCFilterRequestBuilder`.
        .. versionchanged:: 0.10.2
            This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to
            filter on the RPC's resultset.
        """
        if params is None:
            params = {}
        # HEAD wins over GET; otherwise the call is a POST with a JSON body.
        method = "HEAD" if head else "GET" if get else "POST"

        headers = Headers({"Prefer": f"count={count}"}) if count else Headers()

        if method in ("HEAD", "GET"):
            # For read-only calls the procedure arguments travel as query params.
            return AsyncRPCFilterRequestBuilder[Any](
                self.session,
                f"/rpc/{func}",
                method,
                headers,
                QueryParams(params),
                json={},
            )
        # the params here are params to be sent to the RPC and not the queryparams!
        return AsyncRPCFilterRequestBuilder[Any](
            self.session, f"/rpc/{func}", method, headers, QueryParams(), json=params
        )
56 | """ 57 | r = await self.session.request( 58 | self.http_method, 59 | self.path, 60 | json=self.json, 61 | params=self.params, 62 | headers=self.headers, 63 | ) 64 | try: 65 | if r.is_success: 66 | if self.http_method != "HEAD": 67 | body = r.text 68 | if self.headers.get("Accept") == "text/csv": 69 | return body 70 | if self.headers.get( 71 | "Accept" 72 | ) and "application/vnd.pgrst.plan" in self.headers.get("Accept"): 73 | if "+json" not in self.headers.get("Accept"): 74 | return body 75 | return APIResponse[_ReturnT].from_http_request_response(r) 76 | else: 77 | json_obj = APIErrorFromJSON.model_validate_json(r.content) 78 | raise APIError(dict(json_obj)) 79 | except ValidationError as e: 80 | raise APIError(generate_default_error_message(r)) 81 | 82 | 83 | class AsyncSingleRequestBuilder(Generic[_ReturnT]): 84 | def __init__( 85 | self, 86 | session: AsyncClient, 87 | path: str, 88 | http_method: str, 89 | headers: Headers, 90 | params: QueryParams, 91 | json: dict, 92 | ) -> None: 93 | self.session = session 94 | self.path = path 95 | self.http_method = http_method 96 | self.headers = headers 97 | self.params = params 98 | self.json = json 99 | 100 | async def execute(self) -> SingleAPIResponse[_ReturnT]: 101 | """Execute the query. 102 | 103 | .. tip:: 104 | This is the last method called, after the query is built. 105 | 106 | Returns: 107 | :class:`SingleAPIResponse` 108 | 109 | Raises: 110 | :class:`APIError` If the API raised an error. 
111 | """ 112 | r = await self.session.request( 113 | self.http_method, 114 | self.path, 115 | json=self.json, 116 | params=self.params, 117 | headers=self.headers, 118 | ) 119 | try: 120 | if ( 121 | 200 <= r.status_code <= 299 122 | ): # Response.ok from JS (https://developer.mozilla.org/en-US/docs/Web/API/Response/ok) 123 | return SingleAPIResponse[_ReturnT].from_http_request_response(r) 124 | else: 125 | json_obj = APIErrorFromJSON.model_validate_json(r.content) 126 | raise APIError(dict(json_obj)) 127 | except ValidationError as e: 128 | raise APIError(generate_default_error_message(r)) 129 | 130 | 131 | class AsyncMaybeSingleRequestBuilder(AsyncSingleRequestBuilder[_ReturnT]): 132 | async def execute(self) -> Optional[SingleAPIResponse[_ReturnT]]: 133 | r = None 134 | try: 135 | r = await AsyncSingleRequestBuilder[_ReturnT].execute(self) 136 | except APIError as e: 137 | if e.details and "The result contains 0 rows" in e.details: 138 | return None 139 | if not r: 140 | raise APIError( 141 | { 142 | "message": "Missing response", 143 | "code": "204", 144 | "hint": "Please check traceback of the code", 145 | "details": "Postgrest couldn't retrieve response, please check traceback of the code. 
# Type checking is ignored on the class line as a workaround for
# https://github.com/python/mypy/issues/9319
class AsyncFilterRequestBuilder(BaseFilterRequestBuilder[_ReturnT], AsyncQueryRequestBuilder[_ReturnT]):  # type: ignore
    """Query builder that layers filter methods over plain query execution."""

    def __init__(
        self,
        session: AsyncClient,
        path: str,
        http_method: str,
        headers: Headers,
        params: QueryParams,
        json: dict,
    ) -> None:
        # Initialise both parents explicitly; get_origin_and_cast unwraps the
        # parameterised generic so the runtime class' __init__ is invoked.
        filter_base = get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT])
        query_base = get_origin_and_cast(AsyncQueryRequestBuilder[_ReturnT])
        filter_base.__init__(self, session, headers, params)
        query_base.__init__(self, session, path, http_method, headers, params, json)


# This class exists for type-safety. See
# https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf
class AsyncRPCFilterRequestBuilder(
    BaseRPCRequestBuilder[_ReturnT], AsyncSingleRequestBuilder[_ReturnT]
):
    """Filterable builder for RPC calls that resolve to a single response."""

    def __init__(
        self,
        session: AsyncClient,
        path: str,
        http_method: str,
        headers: Headers,
        params: QueryParams,
        json: dict,
    ) -> None:
        filter_base = get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT])
        single_base = get_origin_and_cast(AsyncSingleRequestBuilder[_ReturnT])
        filter_base.__init__(self, session, headers, params)
        single_base.__init__(self, session, path, http_method, headers, params, json)
params 204 | ) 205 | get_origin_and_cast(AsyncQueryRequestBuilder[_ReturnT]).__init__( 206 | self, session, path, http_method, headers, params, json 207 | ) 208 | 209 | def single(self) -> AsyncSingleRequestBuilder[_ReturnT]: 210 | """Specify that the query will only return a single row in response. 211 | 212 | .. caution:: 213 | The API will raise an error if the query returned more than one row. 214 | """ 215 | self.headers["Accept"] = "application/vnd.pgrst.object+json" 216 | return AsyncSingleRequestBuilder[_ReturnT]( 217 | headers=self.headers, 218 | http_method=self.http_method, 219 | json=self.json, 220 | params=self.params, 221 | path=self.path, 222 | session=self.session, # type: ignore 223 | ) 224 | 225 | def maybe_single(self) -> AsyncMaybeSingleRequestBuilder[_ReturnT]: 226 | """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error.""" 227 | self.headers["Accept"] = "application/vnd.pgrst.object+json" 228 | return AsyncMaybeSingleRequestBuilder[_ReturnT]( 229 | headers=self.headers, 230 | http_method=self.http_method, 231 | json=self.json, 232 | params=self.params, 233 | path=self.path, 234 | session=self.session, # type: ignore 235 | ) 236 | 237 | def text_search( 238 | self, column: str, query: str, options: dict[str, Any] = {} 239 | ) -> AsyncFilterRequestBuilder[_ReturnT]: 240 | type_ = options.get("type") 241 | type_part = "" 242 | if type_ == "plain": 243 | type_part = "pl" 244 | elif type_ == "phrase": 245 | type_part = "ph" 246 | elif type_ == "web_search": 247 | type_part = "w" 248 | config_part = f"({options.get('config')})" if options.get("config") else "" 249 | self.params = self.params.add(column, f"{type_part}fts{config_part}.{query}") 250 | 251 | return AsyncQueryRequestBuilder[_ReturnT]( 252 | headers=self.headers, 253 | http_method=self.http_method, 254 | json=self.json, 255 | params=self.params, 256 | path=self.path, 257 | 
session=self.session, # type: ignore 258 | ) 259 | 260 | def csv(self) -> AsyncSingleRequestBuilder[str]: 261 | """Specify that the query must retrieve data as a single CSV string.""" 262 | self.headers["Accept"] = "text/csv" 263 | return AsyncSingleRequestBuilder[str]( 264 | session=self.session, # type: ignore 265 | path=self.path, 266 | http_method=self.http_method, 267 | headers=self.headers, 268 | params=self.params, 269 | json=self.json, 270 | ) 271 | 272 | 273 | class AsyncRequestBuilder(Generic[_ReturnT]): 274 | def __init__(self, session: AsyncClient, path: str) -> None: 275 | self.session = session 276 | self.path = path 277 | 278 | def select( 279 | self, 280 | *columns: str, 281 | count: Optional[CountMethod] = None, 282 | head: Optional[bool] = None, 283 | ) -> AsyncSelectRequestBuilder[_ReturnT]: 284 | """Run a SELECT query. 285 | 286 | Args: 287 | *columns: The names of the columns to fetch. 288 | count: The method to use to get the count of rows returned. 289 | Returns: 290 | :class:`AsyncSelectRequestBuilder` 291 | """ 292 | method, params, headers, json = pre_select(*columns, count=count, head=head) 293 | return AsyncSelectRequestBuilder[_ReturnT]( 294 | self.session, self.path, method, headers, params, json 295 | ) 296 | 297 | def insert( 298 | self, 299 | json: Union[dict, list], 300 | *, 301 | count: Optional[CountMethod] = None, 302 | returning: ReturnMethod = ReturnMethod.representation, 303 | upsert: bool = False, 304 | default_to_null: bool = True, 305 | ) -> AsyncQueryRequestBuilder[_ReturnT]: 306 | """Run an INSERT query. 307 | 308 | Args: 309 | json: The row to be inserted. 310 | count: The method to use to get the count of rows returned. 311 | returning: Either 'minimal' or 'representation' 312 | upsert: Whether the query should be an upsert. 313 | default_to_null: Make missing fields default to `null`. 314 | Otherwise, use the default value for the column. 315 | Only applies for bulk inserts. 
316 | Returns: 317 | :class:`AsyncQueryRequestBuilder` 318 | """ 319 | method, params, headers, json = pre_insert( 320 | json, 321 | count=count, 322 | returning=returning, 323 | upsert=upsert, 324 | default_to_null=default_to_null, 325 | ) 326 | return AsyncQueryRequestBuilder[_ReturnT]( 327 | self.session, self.path, method, headers, params, json 328 | ) 329 | 330 | def upsert( 331 | self, 332 | json: Union[dict, list], 333 | *, 334 | count: Optional[CountMethod] = None, 335 | returning: ReturnMethod = ReturnMethod.representation, 336 | ignore_duplicates: bool = False, 337 | on_conflict: str = "", 338 | default_to_null: bool = True, 339 | ) -> AsyncQueryRequestBuilder[_ReturnT]: 340 | """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. 341 | 342 | Args: 343 | json: The row to be inserted. 344 | count: The method to use to get the count of rows returned. 345 | returning: Either 'minimal' or 'representation' 346 | ignore_duplicates: Whether duplicate rows should be ignored. 347 | on_conflict: Specified columns to be made to work with UNIQUE constraint. 348 | default_to_null: Make missing fields default to `null`. Otherwise, use the 349 | default value for the column. This only applies when inserting new rows, 350 | not when merging with existing rows under `ignoreDuplicates: false`. 351 | This also only applies when doing bulk upserts. 
352 | Returns: 353 | :class:`AsyncQueryRequestBuilder` 354 | """ 355 | method, params, headers, json = pre_upsert( 356 | json, 357 | count=count, 358 | returning=returning, 359 | ignore_duplicates=ignore_duplicates, 360 | on_conflict=on_conflict, 361 | default_to_null=default_to_null, 362 | ) 363 | return AsyncQueryRequestBuilder[_ReturnT]( 364 | self.session, self.path, method, headers, params, json 365 | ) 366 | 367 | def update( 368 | self, 369 | json: dict, 370 | *, 371 | count: Optional[CountMethod] = None, 372 | returning: ReturnMethod = ReturnMethod.representation, 373 | ) -> AsyncFilterRequestBuilder[_ReturnT]: 374 | """Run an UPDATE query. 375 | 376 | Args: 377 | json: The updated fields. 378 | count: The method to use to get the count of rows returned. 379 | returning: Either 'minimal' or 'representation' 380 | Returns: 381 | :class:`AsyncFilterRequestBuilder` 382 | """ 383 | method, params, headers, json = pre_update( 384 | json, 385 | count=count, 386 | returning=returning, 387 | ) 388 | return AsyncFilterRequestBuilder[_ReturnT]( 389 | self.session, self.path, method, headers, params, json 390 | ) 391 | 392 | def delete( 393 | self, 394 | *, 395 | count: Optional[CountMethod] = None, 396 | returning: ReturnMethod = ReturnMethod.representation, 397 | ) -> AsyncFilterRequestBuilder[_ReturnT]: 398 | """Run a DELETE query. 399 | 400 | Args: 401 | count: The method to use to get the count of rows returned. 
402 | returning: Either 'minimal' or 'representation' 403 | Returns: 404 | :class:`AsyncFilterRequestBuilder` 405 | """ 406 | method, params, headers, json = pre_delete( 407 | count=count, 408 | returning=returning, 409 | ) 410 | return AsyncFilterRequestBuilder[_ReturnT]( 411 | self.session, self.path, method, headers, params, json 412 | ) 413 | -------------------------------------------------------------------------------- /postgrest/_sync/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | -------------------------------------------------------------------------------- /postgrest/_sync/client.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Dict, Optional, Union, cast 4 | 5 | from deprecation import deprecated 6 | from httpx import Headers, QueryParams, Timeout 7 | 8 | from ..base_client import BasePostgrestClient 9 | from ..constants import ( 10 | DEFAULT_POSTGREST_CLIENT_HEADERS, 11 | DEFAULT_POSTGREST_CLIENT_TIMEOUT, 12 | ) 13 | from ..types import CountMethod 14 | from ..utils import SyncClient 15 | from ..version import __version__ 16 | from .request_builder import SyncRequestBuilder, SyncRPCFilterRequestBuilder 17 | 18 | _TableT = Dict[str, Any] 19 | 20 | 21 | class SyncPostgrestClient(BasePostgrestClient): 22 | """PostgREST client.""" 23 | 24 | def __init__( 25 | self, 26 | base_url: str, 27 | *, 28 | schema: str = "public", 29 | headers: Dict[str, str] = DEFAULT_POSTGREST_CLIENT_HEADERS, 30 | timeout: Union[int, float, Timeout] = DEFAULT_POSTGREST_CLIENT_TIMEOUT, 31 | verify: bool = True, 32 | proxy: Optional[str] = None, 33 | ) -> None: 34 | BasePostgrestClient.__init__( 35 | self, 36 | base_url, 37 | schema=schema, 38 | headers=headers, 39 | timeout=timeout, 40 | verify=verify, 41 | proxy=proxy, 42 | ) 43 | self.session = cast(SyncClient, self.session) 44 | 45 | 
def create_session( 46 | self, 47 | base_url: str, 48 | headers: Dict[str, str], 49 | timeout: Union[int, float, Timeout], 50 | verify: bool = True, 51 | proxy: Optional[str] = None, 52 | ) -> SyncClient: 53 | return SyncClient( 54 | base_url=base_url, 55 | headers=headers, 56 | timeout=timeout, 57 | verify=verify, 58 | proxy=proxy, 59 | follow_redirects=True, 60 | http2=True, 61 | ) 62 | 63 | def schema(self, schema: str): 64 | """Switch to another schema.""" 65 | return SyncPostgrestClient( 66 | base_url=self.base_url, 67 | schema=schema, 68 | headers=self.headers, 69 | timeout=self.timeout, 70 | verify=self.verify, 71 | proxy=self.proxy, 72 | ) 73 | 74 | def __enter__(self) -> SyncPostgrestClient: 75 | return self 76 | 77 | def __exit__(self, exc_type, exc, tb) -> None: 78 | self.aclose() 79 | 80 | def aclose(self) -> None: 81 | """Close the underlying HTTP connections.""" 82 | self.session.aclose() 83 | 84 | def from_(self, table: str) -> SyncRequestBuilder[_TableT]: 85 | """Perform a table operation. 86 | 87 | Args: 88 | table: The name of the table 89 | Returns: 90 | :class:`AsyncRequestBuilder` 91 | """ 92 | return SyncRequestBuilder[_TableT](self.session, f"/{table}") 93 | 94 | def table(self, table: str) -> SyncRequestBuilder[_TableT]: 95 | """Alias to :meth:`from_`.""" 96 | return self.from_(table) 97 | 98 | @deprecated("0.2.0", "1.0.0", __version__, "Use self.from_() instead") 99 | def from_table(self, table: str) -> SyncRequestBuilder: 100 | """Alias to :meth:`from_`.""" 101 | return self.from_(table) 102 | 103 | def rpc( 104 | self, 105 | func: str, 106 | params: dict, 107 | count: Optional[CountMethod] = None, 108 | head: bool = False, 109 | get: bool = False, 110 | ) -> SyncRPCFilterRequestBuilder[Any]: 111 | """Perform a stored procedure call. 112 | 113 | Args: 114 | func: The name of the remote procedure to run. 115 | params: The parameters to be passed to the remote procedure. 116 | count: The method to use to get the count of rows returned. 
117 | head: When set to `true`, `data` will not be returned. Useful if you only need the count. 118 | get: When set to `true`, the function will be called with read-only access mode. 119 | Returns: 120 | :class:`AsyncRPCFilterRequestBuilder` 121 | Example: 122 | .. code-block:: python 123 | 124 | await client.rpc("foobar", {"arg": "value"}).execute() 125 | 126 | .. versionchanged:: 0.10.9 127 | This method now returns a :class:`AsyncRPCFilterRequestBuilder`. 128 | .. versionchanged:: 0.10.2 129 | This method now returns a :class:`AsyncFilterRequestBuilder` which allows you to 130 | filter on the RPC's resultset. 131 | """ 132 | method = "HEAD" if head else "GET" if get else "POST" 133 | 134 | headers = Headers({"Prefer": f"count={count}"}) if count else Headers() 135 | 136 | if method in ("HEAD", "GET"): 137 | return SyncRPCFilterRequestBuilder[Any]( 138 | self.session, 139 | f"/rpc/{func}", 140 | method, 141 | headers, 142 | QueryParams(params), 143 | json={}, 144 | ) 145 | # the params here are params to be sent to the RPC and not the queryparams! 
146 | return SyncRPCFilterRequestBuilder[Any]( 147 | self.session, f"/rpc/{func}", method, headers, QueryParams(), json=params 148 | ) 149 | -------------------------------------------------------------------------------- /postgrest/_sync/request_builder.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any, Generic, Optional, TypeVar, Union 4 | 5 | from httpx import Headers, QueryParams 6 | from pydantic import ValidationError 7 | 8 | from ..base_request_builder import ( 9 | APIResponse, 10 | BaseFilterRequestBuilder, 11 | BaseRPCRequestBuilder, 12 | BaseSelectRequestBuilder, 13 | CountMethod, 14 | SingleAPIResponse, 15 | pre_delete, 16 | pre_insert, 17 | pre_select, 18 | pre_update, 19 | pre_upsert, 20 | ) 21 | from ..exceptions import APIError, APIErrorFromJSON, generate_default_error_message 22 | from ..types import ReturnMethod 23 | from ..utils import SyncClient, get_origin_and_cast 24 | 25 | _ReturnT = TypeVar("_ReturnT") 26 | 27 | 28 | class SyncQueryRequestBuilder(Generic[_ReturnT]): 29 | def __init__( 30 | self, 31 | session: SyncClient, 32 | path: str, 33 | http_method: str, 34 | headers: Headers, 35 | params: QueryParams, 36 | json: dict, 37 | ) -> None: 38 | self.session = session 39 | self.path = path 40 | self.http_method = http_method 41 | self.headers = headers 42 | self.params = params 43 | self.json = None if http_method in {"GET", "HEAD"} else json 44 | 45 | def execute(self) -> APIResponse[_ReturnT]: 46 | """Execute the query. 47 | 48 | .. tip:: 49 | This is the last method called, after the query is built. 50 | 51 | Returns: 52 | :class:`APIResponse` 53 | 54 | Raises: 55 | :class:`APIError` If the API raised an error. 
56 | """ 57 | r = self.session.request( 58 | self.http_method, 59 | self.path, 60 | json=self.json, 61 | params=self.params, 62 | headers=self.headers, 63 | ) 64 | try: 65 | if r.is_success: 66 | if self.http_method != "HEAD": 67 | body = r.text 68 | if self.headers.get("Accept") == "text/csv": 69 | return body 70 | if self.headers.get( 71 | "Accept" 72 | ) and "application/vnd.pgrst.plan" in self.headers.get("Accept"): 73 | if "+json" not in self.headers.get("Accept"): 74 | return body 75 | return APIResponse[_ReturnT].from_http_request_response(r) 76 | else: 77 | json_obj = APIErrorFromJSON.model_validate_json(r.content) 78 | raise APIError(dict(json_obj)) 79 | except ValidationError as e: 80 | raise APIError(generate_default_error_message(r)) 81 | 82 | 83 | class SyncSingleRequestBuilder(Generic[_ReturnT]): 84 | def __init__( 85 | self, 86 | session: SyncClient, 87 | path: str, 88 | http_method: str, 89 | headers: Headers, 90 | params: QueryParams, 91 | json: dict, 92 | ) -> None: 93 | self.session = session 94 | self.path = path 95 | self.http_method = http_method 96 | self.headers = headers 97 | self.params = params 98 | self.json = json 99 | 100 | def execute(self) -> SingleAPIResponse[_ReturnT]: 101 | """Execute the query. 102 | 103 | .. tip:: 104 | This is the last method called, after the query is built. 105 | 106 | Returns: 107 | :class:`SingleAPIResponse` 108 | 109 | Raises: 110 | :class:`APIError` If the API raised an error. 
class SyncMaybeSingleRequestBuilder(SyncSingleRequestBuilder[_ReturnT]):
    def execute(self) -> Optional[SingleAPIResponse[_ReturnT]]:
        """Execute the query, returning ``None`` when no row matched.

        Returns:
            :class:`SingleAPIResponse`, or ``None`` if the result was empty.

        Raises:
            :class:`APIError` If the API raised any error other than the
                "zero rows" condition.
        """
        try:
            return SyncSingleRequestBuilder[_ReturnT].execute(self)
        except APIError as e:
            # PostgREST reports an empty result for a singular request with
            # this message; translate it into `None` instead of raising.
            if e.details and "The result contains 0 rows" in e.details:
                return None
            # Bug fix: previously any other APIError was silently swallowed
            # here and replaced by a generic "Missing response" error,
            # masking the real failure; re-raise the original error instead.
            raise


# ignoring type checking as a workaround for https://github.com/python/mypy/issues/9319
class SyncFilterRequestBuilder(BaseFilterRequestBuilder[_ReturnT], SyncQueryRequestBuilder[_ReturnT]):  # type: ignore
    """Request builder combining filter methods with query execution."""

    def __init__(
        self,
        session: SyncClient,
        path: str,
        http_method: str,
        headers: Headers,
        params: QueryParams,
        json: dict,
    ) -> None:
        # Initialise both bases explicitly; get_origin_and_cast strips the
        # generic parametrisation so the real classes' __init__ is called.
        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
            self, session, headers, params
        )
        get_origin_and_cast(SyncQueryRequestBuilder[_ReturnT]).__init__(
            self, session, path, http_method, headers, params, json
        )


# this exists for type-safety. see https://gist.github.com/anand2312/93d3abf401335fd3310d9e30112303bf
class SyncRPCFilterRequestBuilder(
    BaseRPCRequestBuilder[_ReturnT], SyncSingleRequestBuilder[_ReturnT]
):
    """Filterable request builder for RPC calls (single-object responses)."""

    def __init__(
        self,
        session: SyncClient,
        path: str,
        http_method: str,
        headers: Headers,
        params: QueryParams,
        json: dict,
    ) -> None:
        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
            self, session, headers, params
        )
        get_origin_and_cast(SyncSingleRequestBuilder[_ReturnT]).__init__(
            self, session, path, http_method, headers, params, json
        )
205 | get_origin_and_cast(SyncQueryRequestBuilder[_ReturnT]).__init__( 206 | self, session, path, http_method, headers, params, json 207 | ) 208 | 209 | def single(self) -> SyncSingleRequestBuilder[_ReturnT]: 210 | """Specify that the query will only return a single row in response. 211 | 212 | .. caution:: 213 | The API will raise an error if the query returned more than one row. 214 | """ 215 | self.headers["Accept"] = "application/vnd.pgrst.object+json" 216 | return SyncSingleRequestBuilder[_ReturnT]( 217 | headers=self.headers, 218 | http_method=self.http_method, 219 | json=self.json, 220 | params=self.params, 221 | path=self.path, 222 | session=self.session, # type: ignore 223 | ) 224 | 225 | def maybe_single(self) -> SyncMaybeSingleRequestBuilder[_ReturnT]: 226 | """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error.""" 227 | self.headers["Accept"] = "application/vnd.pgrst.object+json" 228 | return SyncMaybeSingleRequestBuilder[_ReturnT]( 229 | headers=self.headers, 230 | http_method=self.http_method, 231 | json=self.json, 232 | params=self.params, 233 | path=self.path, 234 | session=self.session, # type: ignore 235 | ) 236 | 237 | def text_search( 238 | self, column: str, query: str, options: dict[str, Any] = {} 239 | ) -> SyncFilterRequestBuilder[_ReturnT]: 240 | type_ = options.get("type") 241 | type_part = "" 242 | if type_ == "plain": 243 | type_part = "pl" 244 | elif type_ == "phrase": 245 | type_part = "ph" 246 | elif type_ == "web_search": 247 | type_part = "w" 248 | config_part = f"({options.get('config')})" if options.get("config") else "" 249 | self.params = self.params.add(column, f"{type_part}fts{config_part}.{query}") 250 | 251 | return SyncQueryRequestBuilder[_ReturnT]( 252 | headers=self.headers, 253 | http_method=self.http_method, 254 | json=self.json, 255 | params=self.params, 256 | path=self.path, 257 | session=self.session, # type: ignore 
258 | ) 259 | 260 | def csv(self) -> SyncSingleRequestBuilder[str]: 261 | """Specify that the query must retrieve data as a single CSV string.""" 262 | self.headers["Accept"] = "text/csv" 263 | return SyncSingleRequestBuilder[str]( 264 | session=self.session, # type: ignore 265 | path=self.path, 266 | http_method=self.http_method, 267 | headers=self.headers, 268 | params=self.params, 269 | json=self.json, 270 | ) 271 | 272 | 273 | class SyncRequestBuilder(Generic[_ReturnT]): 274 | def __init__(self, session: SyncClient, path: str) -> None: 275 | self.session = session 276 | self.path = path 277 | 278 | def select( 279 | self, 280 | *columns: str, 281 | count: Optional[CountMethod] = None, 282 | head: Optional[bool] = None, 283 | ) -> SyncSelectRequestBuilder[_ReturnT]: 284 | """Run a SELECT query. 285 | 286 | Args: 287 | *columns: The names of the columns to fetch. 288 | count: The method to use to get the count of rows returned. 289 | Returns: 290 | :class:`AsyncSelectRequestBuilder` 291 | """ 292 | method, params, headers, json = pre_select(*columns, count=count, head=head) 293 | return SyncSelectRequestBuilder[_ReturnT]( 294 | self.session, self.path, method, headers, params, json 295 | ) 296 | 297 | def insert( 298 | self, 299 | json: Union[dict, list], 300 | *, 301 | count: Optional[CountMethod] = None, 302 | returning: ReturnMethod = ReturnMethod.representation, 303 | upsert: bool = False, 304 | default_to_null: bool = True, 305 | ) -> SyncQueryRequestBuilder[_ReturnT]: 306 | """Run an INSERT query. 307 | 308 | Args: 309 | json: The row to be inserted. 310 | count: The method to use to get the count of rows returned. 311 | returning: Either 'minimal' or 'representation' 312 | upsert: Whether the query should be an upsert. 313 | default_to_null: Make missing fields default to `null`. 314 | Otherwise, use the default value for the column. 315 | Only applies for bulk inserts. 
316 | Returns: 317 | :class:`AsyncQueryRequestBuilder` 318 | """ 319 | method, params, headers, json = pre_insert( 320 | json, 321 | count=count, 322 | returning=returning, 323 | upsert=upsert, 324 | default_to_null=default_to_null, 325 | ) 326 | return SyncQueryRequestBuilder[_ReturnT]( 327 | self.session, self.path, method, headers, params, json 328 | ) 329 | 330 | def upsert( 331 | self, 332 | json: Union[dict, list], 333 | *, 334 | count: Optional[CountMethod] = None, 335 | returning: ReturnMethod = ReturnMethod.representation, 336 | ignore_duplicates: bool = False, 337 | on_conflict: str = "", 338 | default_to_null: bool = True, 339 | ) -> SyncQueryRequestBuilder[_ReturnT]: 340 | """Run an upsert (INSERT ... ON CONFLICT DO UPDATE) query. 341 | 342 | Args: 343 | json: The row to be inserted. 344 | count: The method to use to get the count of rows returned. 345 | returning: Either 'minimal' or 'representation' 346 | ignore_duplicates: Whether duplicate rows should be ignored. 347 | on_conflict: Specified columns to be made to work with UNIQUE constraint. 348 | default_to_null: Make missing fields default to `null`. Otherwise, use the 349 | default value for the column. This only applies when inserting new rows, 350 | not when merging with existing rows under `ignoreDuplicates: false`. 351 | This also only applies when doing bulk upserts. 
352 | Returns: 353 | :class:`AsyncQueryRequestBuilder` 354 | """ 355 | method, params, headers, json = pre_upsert( 356 | json, 357 | count=count, 358 | returning=returning, 359 | ignore_duplicates=ignore_duplicates, 360 | on_conflict=on_conflict, 361 | default_to_null=default_to_null, 362 | ) 363 | return SyncQueryRequestBuilder[_ReturnT]( 364 | self.session, self.path, method, headers, params, json 365 | ) 366 | 367 | def update( 368 | self, 369 | json: dict, 370 | *, 371 | count: Optional[CountMethod] = None, 372 | returning: ReturnMethod = ReturnMethod.representation, 373 | ) -> SyncFilterRequestBuilder[_ReturnT]: 374 | """Run an UPDATE query. 375 | 376 | Args: 377 | json: The updated fields. 378 | count: The method to use to get the count of rows returned. 379 | returning: Either 'minimal' or 'representation' 380 | Returns: 381 | :class:`AsyncFilterRequestBuilder` 382 | """ 383 | method, params, headers, json = pre_update( 384 | json, 385 | count=count, 386 | returning=returning, 387 | ) 388 | return SyncFilterRequestBuilder[_ReturnT]( 389 | self.session, self.path, method, headers, params, json 390 | ) 391 | 392 | def delete( 393 | self, 394 | *, 395 | count: Optional[CountMethod] = None, 396 | returning: ReturnMethod = ReturnMethod.representation, 397 | ) -> SyncFilterRequestBuilder[_ReturnT]: 398 | """Run a DELETE query. 399 | 400 | Args: 401 | count: The method to use to get the count of rows returned. 
class BasePostgrestClient(ABC):
    """Base PostgREST client shared by the sync and async implementations."""

    def __init__(
        self,
        base_url: str,
        *,
        schema: str,
        headers: Dict[str, str],
        timeout: Union[int, float, Timeout],
        verify: bool = True,
        proxy: Optional[str] = None,
    ) -> None:
        """Initialise the client and create the underlying HTTP session.

        Args:
            base_url: Root URL of the PostgREST server.
            schema: Database schema, sent via Accept-Profile/Content-Profile.
            headers: Extra headers sent with every request.
            timeout: Request timeout (seconds, or an httpx Timeout).
            verify: Whether to verify TLS certificates.
            proxy: Optional proxy URL.

        Raises:
            ValueError: If `base_url` is not a valid HTTP URL.
        """
        # Bug fix: the original constructed the ValueError without raising
        # it, so invalid URLs were silently accepted.
        if not is_http_url(base_url):
            raise ValueError("base_url must be a valid HTTP URL string")

        self.base_url = base_url
        # The schema rides along on every request through profile headers.
        self.headers = {
            **headers,
            "Accept-Profile": schema,
            "Content-Profile": schema,
        }
        self.timeout = timeout
        self.verify = verify
        self.proxy = proxy
        self.session = self.create_session(
            self.base_url, self.headers, self.timeout, self.verify, self.proxy
        )

    @abstractmethod
    def create_session(
        self,
        base_url: str,
        headers: Dict[str, str],
        timeout: Union[int, float, Timeout],
        verify: bool = True,
        proxy: Optional[str] = None,
    ) -> Union[SyncClient, AsyncClient]:
        """Create the HTTP session; implemented by sync/async subclasses."""
        raise NotImplementedError()

    def auth(
        self,
        token: Optional[str],
        *,
        username: Union[str, bytes, None] = None,
        password: Union[str, bytes] = "",
    ):
        """
        Authenticate the client with either bearer token or basic authentication.

        Raises:
            `ValueError`: If neither authentication scheme is provided,
                or if the bearer token is not a valid JWT.

        .. note::
            Bearer token is preferred if both ones are provided.
        """
        if token:
            # Bug fix: the original constructed the ValueError without
            # raising it, so malformed tokens were silently accepted.
            if not is_valid_jwt(token):
                raise ValueError("token must be a valid JWT authorization token")
            self.session.headers["Authorization"] = f"Bearer {token}"
        elif username:
            self.session.auth = BasicAuth(username, password)
        else:
            raise ValueError(
                "Neither bearer token or basic authentication scheme is provided"
            )
        return self
unique_keys = {key for row in json for key in row.keys()} 51 | columns = ",".join([f'"{k}"' for k in unique_keys]) 52 | return columns 53 | 54 | 55 | def _cleaned_columns(columns: Tuple[str, ...]) -> str: 56 | quoted = False 57 | cleaned = [] 58 | 59 | for column in columns: 60 | clean_column = "" 61 | for char in column: 62 | if char.isspace() and not quoted: 63 | continue 64 | if char == '"': 65 | quoted = not quoted 66 | clean_column += char 67 | cleaned.append(clean_column) 68 | 69 | return ",".join(cleaned) 70 | 71 | 72 | def pre_select( 73 | *columns: str, 74 | count: Optional[CountMethod] = None, 75 | head: Optional[bool] = None, 76 | ) -> QueryArgs: 77 | method = RequestMethod.HEAD if head else RequestMethod.GET 78 | cleaned_columns = _cleaned_columns(columns or "*") 79 | params = QueryParams({"select": cleaned_columns}) 80 | 81 | headers = Headers({"Prefer": f"count={count}"}) if count else Headers() 82 | return QueryArgs(method, params, headers, {}) 83 | 84 | 85 | def pre_insert( 86 | json: Union[dict, list], 87 | *, 88 | count: Optional[CountMethod], 89 | returning: ReturnMethod, 90 | upsert: bool, 91 | default_to_null: bool = True, 92 | ) -> QueryArgs: 93 | prefer_headers = [f"return={returning}"] 94 | if count: 95 | prefer_headers.append(f"count={count}") 96 | if upsert: 97 | prefer_headers.append("resolution=merge-duplicates") 98 | if not default_to_null: 99 | prefer_headers.append("missing=default") 100 | headers = Headers({"Prefer": ",".join(prefer_headers)}) 101 | # Adding 'columns' query parameters 102 | query_params = {} 103 | if isinstance(json, list): 104 | query_params = {"columns": _unique_columns(json)} 105 | return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json) 106 | 107 | 108 | def pre_upsert( 109 | json: Union[dict, list], 110 | *, 111 | count: Optional[CountMethod], 112 | returning: ReturnMethod, 113 | ignore_duplicates: bool, 114 | on_conflict: str = "", 115 | default_to_null: bool = True, 116 | ) -> QueryArgs: 
117 | query_params = {} 118 | prefer_headers = [f"return={returning}"] 119 | if count: 120 | prefer_headers.append(f"count={count}") 121 | resolution = "ignore" if ignore_duplicates else "merge" 122 | prefer_headers.append(f"resolution={resolution}-duplicates") 123 | if not default_to_null: 124 | prefer_headers.append("missing=default") 125 | headers = Headers({"Prefer": ",".join(prefer_headers)}) 126 | if on_conflict: 127 | query_params["on_conflict"] = on_conflict 128 | # Adding 'columns' query parameters 129 | if isinstance(json, list): 130 | query_params["columns"] = _unique_columns(json) 131 | return QueryArgs(RequestMethod.POST, QueryParams(query_params), headers, json) 132 | 133 | 134 | def pre_update( 135 | json: dict, 136 | *, 137 | count: Optional[CountMethod], 138 | returning: ReturnMethod, 139 | ) -> QueryArgs: 140 | prefer_headers = [f"return={returning}"] 141 | if count: 142 | prefer_headers.append(f"count={count}") 143 | headers = Headers({"Prefer": ",".join(prefer_headers)}) 144 | return QueryArgs(RequestMethod.PATCH, QueryParams(), headers, json) 145 | 146 | 147 | def pre_delete( 148 | *, 149 | count: Optional[CountMethod], 150 | returning: ReturnMethod, 151 | ) -> QueryArgs: 152 | prefer_headers = [f"return={returning}"] 153 | if count: 154 | prefer_headers.append(f"count={count}") 155 | headers = Headers({"Prefer": ",".join(prefer_headers)}) 156 | return QueryArgs(RequestMethod.DELETE, QueryParams(), headers, {}) 157 | 158 | 159 | _ReturnT = TypeVar("_ReturnT") 160 | 161 | 162 | # the APIResponse.data is marked as _ReturnT instead of list[_ReturnT] 163 | # as it is also returned in the case of rpc() calls; and rpc calls do not 164 | # necessarily return lists. 
165 | # https://github.com/supabase-community/postgrest-py/issues/200 166 | class APIResponse(BaseModel, Generic[_ReturnT]): 167 | data: List[_ReturnT] 168 | """The data returned by the query.""" 169 | count: Optional[int] = None 170 | """The number of rows returned.""" 171 | 172 | @field_validator("data") 173 | @classmethod 174 | def raise_when_api_error(cls: Type[Self], value: Any) -> Any: 175 | if isinstance(value, dict) and value.get("message"): 176 | raise ValueError("You are passing an API error to the data field.") 177 | return value 178 | 179 | @staticmethod 180 | def _get_count_from_content_range_header( 181 | content_range_header: str, 182 | ) -> Optional[int]: 183 | content_range = content_range_header.split("/") 184 | return None if len(content_range) < 2 else int(content_range[1]) 185 | 186 | @staticmethod 187 | def _is_count_in_prefer_header(prefer_header: str) -> bool: 188 | pattern = f"count=({'|'.join([cm.value for cm in CountMethod])})" 189 | return bool(search(pattern, prefer_header)) 190 | 191 | @classmethod 192 | def _get_count_from_http_request_response( 193 | cls: Type[Self], 194 | request_response: RequestResponse, 195 | ) -> Optional[int]: 196 | prefer_header: Optional[str] = request_response.request.headers.get("prefer") 197 | if not prefer_header: 198 | return None 199 | is_count_in_prefer_header = cls._is_count_in_prefer_header(prefer_header) 200 | content_range_header: Optional[str] = request_response.headers.get( 201 | "content-range" 202 | ) 203 | return ( 204 | cls._get_count_from_content_range_header(content_range_header) 205 | if (is_count_in_prefer_header and content_range_header) 206 | else None 207 | ) 208 | 209 | @classmethod 210 | def from_http_request_response( 211 | cls: Type[Self], request_response: RequestResponse 212 | ) -> Self: 213 | count = cls._get_count_from_http_request_response(request_response) 214 | try: 215 | data = request_response.json() 216 | except JSONDecodeError: 217 | data = request_response.text if 
len(request_response.text) > 0 else [] 218 | # the type-ignore here is as pydantic needs us to pass the type parameter 219 | # here explicitly, but pylance already knows that cls is correctly parametrized 220 | return cls[_ReturnT](data=data, count=count) # type: ignore 221 | 222 | @classmethod 223 | def from_dict(cls: Type[Self], dict: Dict[str, Any]) -> Self: 224 | keys = dict.keys() 225 | assert len(keys) == 3 and "data" in keys and "count" in keys and "error" in keys 226 | return cls[_ReturnT]( # type: ignore 227 | data=dict.get("data"), count=dict.get("count"), error=dict.get("error") 228 | ) 229 | 230 | 231 | class SingleAPIResponse(APIResponse[_ReturnT], Generic[_ReturnT]): 232 | data: _ReturnT # type: ignore 233 | """The data returned by the query.""" 234 | 235 | @classmethod 236 | def from_http_request_response( 237 | cls: Type[Self], request_response: RequestResponse 238 | ) -> Self: 239 | count = cls._get_count_from_http_request_response(request_response) 240 | try: 241 | data = request_response.json() 242 | except JSONDecodeError: 243 | data = request_response.text if len(request_response.text) > 0 else [] 244 | return cls[_ReturnT](data=data, count=count) # type: ignore 245 | 246 | @classmethod 247 | def from_dict(cls: Type[Self], dict: Dict[str, Any]) -> Self: 248 | keys = dict.keys() 249 | assert len(keys) == 3 and "data" in keys and "count" in keys and "error" in keys 250 | return cls[_ReturnT]( # type: ignore 251 | data=dict.get("data"), count=dict.get("count"), error=dict.get("error") 252 | ) 253 | 254 | 255 | class BaseFilterRequestBuilder(Generic[_ReturnT]): 256 | def __init__( 257 | self, 258 | session: Union[AsyncClient, SyncClient], 259 | headers: Headers, 260 | params: QueryParams, 261 | ) -> None: 262 | self.session = session 263 | self.headers = headers 264 | self.params = params 265 | self.negate_next = False 266 | 267 | @property 268 | def not_(self: Self) -> Self: 269 | """Whether the filter applied next should be negated.""" 270 | 
        # (continuation of the `not_` property)
        self.negate_next = True
        return self

    def filter(self: Self, column: str, operator: str, criteria: str) -> Self:
        """Apply filters on a query.

        Args:
            column: The name of the column to apply a filter on
            operator: The operator to use while filtering
            criteria: The value to filter by
        """
        # Consume a pending `.not_` by prefixing the operator, e.g. "not.eq".
        if self.negate_next is True:
            self.negate_next = False
            operator = f"{Filters.NOT}.{operator}"
        key, val = sanitize_param(column), f"{operator}.{criteria}"
        self.params = self.params.add(key, val)
        return self

    def eq(self: Self, column: str, value: Any) -> Self:
        """An 'equal to' filter.

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by
        """
        return self.filter(column, Filters.EQ, value)

    def neq(self: Self, column: str, value: Any) -> Self:
        """A 'not equal to' filter

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by
        """
        return self.filter(column, Filters.NEQ, value)

    def gt(self: Self, column: str, value: Any) -> Self:
        """A 'greater than' filter

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by
        """
        return self.filter(column, Filters.GT, value)

    def gte(self: Self, column: str, value: Any) -> Self:
        """A 'greater than or equal to' filter

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by
        """
        return self.filter(column, Filters.GTE, value)

    def lt(self: Self, column: str, value: Any) -> Self:
        """A 'less than' filter

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by
        """
        return self.filter(column, Filters.LT, value)

    def lte(self: Self, column: str, value: Any) -> Self:
        """A 'less than or equal to' filter

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by
        """
        return self.filter(column, Filters.LTE, value)

    def is_(self: Self, column: str, value: Any) -> Self:
        """An 'is' filter

        Args:
            column: The name of the column to apply a filter on
            value: The value to filter by (``None`` is sent as the literal ``null``)
        """
        if value is None:
            value = "null"
        return self.filter(column, Filters.IS, value)

    def like(self: Self, column: str, pattern: str) -> Self:
        """A 'LIKE' filter, to use for pattern matching.

        Args:
            column: The name of the column to apply a filter on
            pattern: The pattern to filter by
        """
        return self.filter(column, Filters.LIKE, pattern)

    def like_all_of(self: Self, column: str, pattern: str) -> Self:
        """A 'LIKE' filter matching ALL of the comma-separated patterns.

        Args:
            column: The name of the column to apply a filter on
            pattern: Comma-separated patterns, rendered as a PostgREST array literal
        """

        return self.filter(column, Filters.LIKE_ALL, f"{{{pattern}}}")

    def like_any_of(self: Self, column: str, pattern: str) -> Self:
        """A 'LIKE' filter matching ANY of the comma-separated patterns.

        Args:
            column: The name of the column to apply a filter on
            pattern: Comma-separated patterns, rendered as a PostgREST array literal
        """

        return self.filter(column, Filters.LIKE_ANY, f"{{{pattern}}}")

    def ilike_all_of(self: Self, column: str, pattern: str) -> Self:
        """An 'ILIKE' filter matching ALL of the comma-separated patterns (case insensitive).

        Args:
            column: The name of the column to apply a filter on
            pattern: Comma-separated patterns, rendered as a PostgREST array literal
        """

        return self.filter(column, Filters.ILIKE_ALL, f"{{{pattern}}}")

    def ilike_any_of(self: Self, column: str, pattern: str) -> Self:
        """An 'ILIKE' filter matching ANY of the comma-separated patterns (case insensitive).

        Args:
            column: The name of the column to apply a filter on
            pattern: Comma-separated patterns, rendered as a PostgREST array literal
        """

        return self.filter(column, Filters.ILIKE_ANY, f"{{{pattern}}}")

    def ilike(self: Self, column: str, pattern: str) -> Self:
        """An 'ILIKE' filter, to use for pattern matching (case insensitive).

        Args:
            column: The name of the column to apply a filter on
            pattern: The pattern to filter by
        """
        return self.filter(column, Filters.ILIKE, pattern)

    def or_(self: Self, filters: str, reference_table: Optional[str] = None) -> Self:
        """An 'or' filter

        Args:
            filters: The filters to use, following PostgREST syntax
            reference_table: Set this to filter on referenced tables instead of the parent table
        """
        key = f"{sanitize_param(reference_table)}.or" if reference_table else "or"
        self.params = self.params.add(key, f"({filters})")
        return self

    def fts(self: Self, column: str, query: Any) -> Self:
        """Full-text search filter (PostgREST ``fts`` operator)."""
        return self.filter(column, Filters.FTS, query)

    def plfts(self: Self, column: str, query: Any) -> Self:
        """Full-text search filter using plain-text query parsing (PostgREST ``plfts``)."""
        return self.filter(column, Filters.PLFTS, query)

    def phfts(self: Self, column: str, query: Any) -> Self:
        """Full-text search filter using phrase query parsing (PostgREST ``phfts``)."""
        return self.filter(column, Filters.PHFTS, query)

    def wfts(self: Self, column: str, query: Any) -> Self:
        """Full-text search filter using web-search query parsing (PostgREST ``wfts``)."""
        return self.filter(column, Filters.WFTS, query)

    def in_(self: Self, column: str, values: Iterable[Any]) -> Self:
        """An 'in' filter: the column value must equal one of ``values``."""
        values = map(sanitize_param, values)
        values = ",".join(values)
        return self.filter(column, Filters.IN, f"({values})")

    def cs(self: Self, column: str, values: Iterable[Any]) -> Self:
        """A 'contains' filter; ``values`` is rendered as a PostgREST array literal.

        NOTE(review): unlike ``in_``, values are not passed through
        ``sanitize_param`` — values containing reserved characters must be
        pre-quoted by the caller.
        """
        values = ",".join(values)
        return self.filter(column, Filters.CS, f"{{{values}}}")

    def cd(self: Self, column: str, values: Iterable[Any]) -> Self:
        """A 'contained by' filter; ``values`` is rendered as a PostgREST array literal."""
        values = ",".join(values)
        return self.filter(column, Filters.CD, f"{{{values}}}")

    def contains(
        self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]]
    ) -> Self:
        """A 'contains' filter accepting a range string, an iterable, or a JSON dict."""
        if isinstance(value, str):
            # range types can be inclusive '[', ']' or exclusive '(', ')' so just
            # keep it simple and accept a string
            return self.filter(column, Filters.CS, value)
        if not isinstance(value, dict) and isinstance(value, Iterable):
            # Expected to be some type of iterable
            stringified_values = ",".join(value)
            return self.filter(column, Filters.CS, f"{{{stringified_values}}}")

        # Dicts are serialized as JSON for json/jsonb columns.
        return self.filter(column, Filters.CS, json.dumps(value))

    def contained_by(
        self: Self, column: str, value: Union[Iterable[Any], str, Dict[Any, Any]]
    ) -> Self:
        """A 'contained by' filter accepting a range string, an iterable, or a JSON dict."""
        if isinstance(value, str):
            # range
            return self.filter(column, Filters.CD, value)
        if not isinstance(value, dict) and isinstance(value, Iterable):
            stringified_values = ",".join(value)
            return self.filter(column, Filters.CD, f"{{{stringified_values}}}")
        return self.filter(column, Filters.CD, json.dumps(value))

    def ov(self: Self, column: str, value: Iterable[Any]) -> Self:
        """An 'overlaps' filter accepting a range string, an iterable, or a JSON dict."""
        if isinstance(value, str):
            # range types can be inclusive '[', ']' or exclusive '(', ')' so just
            # keep it simple and accept a string
            return self.filter(column, Filters.OV, value)
        if not isinstance(value, dict) and isinstance(value, Iterable):
            # Expected to be some type of iterable
            stringified_values = ",".join(value)
            return self.filter(column, Filters.OV, f"{{{stringified_values}}}")
        return self.filter(column, Filters.OV, json.dumps(value))

    # The following range operators take an (int, int) tuple rendered as an
    # exclusive range literal "(lo,hi)". The parameter name `range` shadows the
    # builtin; kept for interface compatibility.
    def sl(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Range 'strictly left of' filter (PostgREST ``sl``)."""
        return self.filter(column, Filters.SL, f"({range[0]},{range[1]})")

    def sr(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Range 'strictly right of' filter (PostgREST ``sr``)."""
        return self.filter(column, Filters.SR, f"({range[0]},{range[1]})")

    def nxl(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Range 'does not extend to the left of' filter (PostgREST ``nxl``)."""
        return self.filter(column, Filters.NXL, f"({range[0]},{range[1]})")

    def nxr(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Range 'does not extend to the right of' filter (PostgREST ``nxr``)."""
        return self.filter(column, Filters.NXR, f"({range[0]},{range[1]})")

    def adj(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Range 'is adjacent to' filter (PostgREST ``adj``)."""
        return self.filter(column, Filters.ADJ, f"({range[0]},{range[1]})")

    def range_gt(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Alias for :meth:`sr`."""
        return self.sr(column, range)

    def range_gte(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Alias for :meth:`nxl`."""
        return self.nxl(column, range)

    def range_lt(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Alias for :meth:`sl`."""
        return self.sl(column, range)

    def range_lte(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Alias for :meth:`nxr`."""
        return self.nxr(column, range)

    def range_adjacent(self: Self, column: str, range: Tuple[int, int]) -> Self:
        """Alias for :meth:`adj`."""
        return self.adj(column, range)

    def overlaps(self: Self, column: str, values: Iterable[Any]) -> Self:
        """Alias for :meth:`ov`."""
        return self.ov(column, values)

    def match(self: Self, query: Dict[str, Any]) -> Self:
        """Apply an 'eq' filter for every key/value pair in ``query``.

        Args:
            query: Mapping of column name to the value it must equal
        Raises:
            ValueError: If ``query`` is empty.
        """
        updated_query = self

        if not query:
            raise ValueError(
                "query dictionary should contain at least one key-value pair"
            )

        for key, value in query.items():
            # eq() mutates and returns self, so reassigning is equivalent to chaining.
            updated_query = self.eq(key, value)

        return updated_query


class BaseSelectRequestBuilder(BaseFilterRequestBuilder[_ReturnT]):
    """Adds result shaping (explain/order/limit/offset/range) on top of filtering."""
    def __init__(
        self,
        session: Union[AsyncClient, SyncClient],
        headers: Headers,
        params: QueryParams,
    ) -> None:
        # Generic[T] is an instance of typing._GenericAlias, so doing Generic[T].__init__
        # tries to call _GenericAlias.__init__ - which is the wrong method
        # The __origin__ attribute of the _GenericAlias is the actual class
        get_origin_and_cast(BaseFilterRequestBuilder[_ReturnT]).__init__(
            self, session, headers, params
        )

    def explain(
        self: Self,
        analyze: bool = False,
        verbose: bool = False,
        settings: bool = False,
        buffers: bool = False,
        wal: bool = False,
        format: Literal["text", "json"] = "text",
    ) -> Self:
        """Request the query's execution plan instead of its rows, via the Accept header.

        Args:
            analyze/verbose/settings/buffers/wal: Plan options to enable.
            format: Plan output format, "text" or "json".
        """
        # Collect the names of the enabled boolean options; this relies on
        # locals() containing exactly the parameters at this point in the body.
        options = [
            key
            for key, value in locals().items()
            if key not in ["self", "format"] and value
        ]
        options_str = "|".join(options)
        self.headers["Accept"] = (
            f"application/vnd.pgrst.plan+{format}; options={options_str}"
        )
        return self

    def order(
        self: Self,
        column: str,
        *,
        desc: bool = False,
        nullsfirst: Optional[bool] = None,
        foreign_table: Optional[str] = None,
    ) -> Self:
        """Sort the returned rows in some specific order.

        Args:
            column: The column to order by
            desc: Whether the rows should be ordered in descending order or not.
            nullsfirst: nullsfirst
            foreign_table: Foreign table name whose results are to be ordered.
        .. versionchanged:: 0.10.3
            Allow ordering results for foreign tables with the foreign_table parameter.
        """
        key = f"{foreign_table}.order" if foreign_table else "order"
        existing_order = self.params.get(key)

        # Append to any existing order clause rather than replacing it.
        self.params = self.params.set(
            key,
            f"{existing_order + ',' if existing_order else ''}"
            + f"{column}.{'desc' if desc else 'asc'}"
            + (
                f".{'nullsfirst' if nullsfirst else 'nullslast'}"
                if nullsfirst is not None
                else ""
            ),
        )
        return self

    def limit(self: Self, size: int, *, foreign_table: Optional[str] = None) -> Self:
        """Limit the number of rows returned by a query.

        Args:
            size: The number of rows to be returned
            foreign_table: Foreign table name to limit
        .. versionchanged:: 0.10.3
            Allow limiting results returned for foreign tables with the foreign_table parameter.
        """
        self.params = self.params.add(
            f"{foreign_table}.limit" if foreign_table else "limit",
            size,
        )
        return self

    def offset(self: Self, size: int) -> Self:
        """Set the starting row index returned by a query.

        Args:
            size: The number of the row to start at
        """
        self.params = self.params.add(
            "offset",
            size,
        )
        return self

    def range(
        self: Self, start: int, end: int, foreign_table: Optional[str] = None
    ) -> Self:
        """Return rows ``start`` through ``end`` inclusive (offset + limit)."""
        self.params = self.params.add(
            f"{foreign_table}.offset" if foreign_table else "offset", start
        )
        self.params = self.params.add(
            f"{foreign_table}.limit" if foreign_table else "limit",
            end - start + 1,
        )
        return self


class BaseRPCRequestBuilder(BaseSelectRequestBuilder[_ReturnT]):
    """Adds RPC response shaping (select/single/maybe_single/csv) on top of selecting."""

    def __init__(
        self,
        session: Union[AsyncClient, SyncClient],
        headers: Headers,
        params: QueryParams,
    ) -> None:
        # Generic[T] is an instance of typing._GenericAlias, so doing Generic[T].__init__
        # tries to call _GenericAlias.__init__ - which is the wrong method
        # The __origin__ attribute of the _GenericAlias is the actual class
        get_origin_and_cast(BaseSelectRequestBuilder[_ReturnT]).__init__(
            self, session, headers, params
        )

    def select(
        self,
        *columns: str,
    ) -> Self:
        """Run a SELECT query.

        Args:
            *columns: The names of the columns to fetch.
        Returns:
            :class:`BaseSelectRequestBuilder`
        """
        method, params, headers, json = pre_select(*columns, count=None)
        self.params = self.params.add("select", params.get("select"))
        # Ask PostgREST to return the affected rows, preserving any existing Prefer values.
        if self.headers.get("Prefer"):
            self.headers["Prefer"] += ",return=representation"
        else:
            self.headers["Prefer"] = "return=representation"

        return self

    def single(self) -> Self:
        """Specify that the query will only return a single row in response.

        .. caution::
            The API will raise an error if the query returned more than one row.
        """
        self.headers["Accept"] = "application/vnd.pgrst.object+json"
        return self

    def maybe_single(self) -> Self:
        """Retrieves at most one row from the result. Result must be at most one row (e.g. using `eq` on a UNIQUE column), otherwise this will result in an error."""
        self.headers["Accept"] = "application/vnd.pgrst.object+json"
        return self

    def csv(self) -> Self:
        """Specify that the query must retrieve data as a single CSV string."""
        self.headers["Accept"] = "text/csv"
        return self


# --- postgrest/constants.py ---
DEFAULT_POSTGREST_CLIENT_HEADERS = {
    "Accept": "application/json",
    "Content-Type": "application/json",
}

# Seconds before an HTTP request is abandoned.
DEFAULT_POSTGREST_CLIENT_TIMEOUT = 120

# --- postgrest/deprecated_client.py ---
from __future__ import annotations

from deprecation import deprecated

from ._async.client import AsyncPostgrestClient
from .version import __version__


class Client(AsyncPostgrestClient):
    """Alias to PostgrestClient."""

    @deprecated("0.2.0", "1.0.0", __version__, "Use PostgrestClient instead")
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)


# Backwards-compatible alias; both names are deprecated in favour of AsyncPostgrestClient.
PostgrestClient = Client

# --- postgrest/deprecated_get_request_builder.py ---
from __future__ import annotations

from deprecation import deprecated

from ._async.request_builder import AsyncSelectRequestBuilder
from .version import __version__
class GetRequestBuilder(AsyncSelectRequestBuilder):
    """Alias to SelectRequestBuilder."""

    @deprecated("0.4.0", "1.0.0", __version__, "Use SelectRequestBuilder instead")
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

# --- postgrest/exceptions.py ---
from typing import Dict, Optional

from pydantic import BaseModel


class APIErrorFromJSON(BaseModel):
    """
    A pydantic object to validate an error info object
    from a json string.
    """

    message: Optional[str]
    """The error message."""
    code: Optional[str]
    """The error code."""
    hint: Optional[str]
    """The error hint."""
    details: Optional[str]
    """The error details."""


class APIError(Exception):
    """
    Base exception for all API errors.
    """

    # Raw error payload as returned by the server; exposed via .json().
    _raw_error: Dict[str, str]
    message: Optional[str]
    """The error message."""
    code: Optional[str]
    """The error code."""
    hint: Optional[str]
    """The error hint."""
    details: Optional[str]
    """The error details."""

    def __init__(self, error: Dict[str, str]) -> None:
        self._raw_error = error
        self.message = error.get("message")
        self.code = error.get("code")
        self.hint = error.get("hint")
        self.details = error.get("details")
        # Initialize Exception with the readable form so str(exc) is useful in logs.
        Exception.__init__(self, str(self))

    def __repr__(self) -> str:
        # Assemble only the sections that are present; fall back to a placeholder.
        error_text = f"Error {self.code}:" if self.code else ""
        message_text = f"\nMessage: {self.message}" if self.message else ""
        hint_text = f"\nHint: {self.hint}" if self.hint else ""
        details_text = f"\nDetails: {self.details}" if self.details else ""
        complete_error_text = f"{error_text}{message_text}{hint_text}{details_text}"
        return complete_error_text or "Empty error"
json(self) -> Dict[str, str]: 54 | """Convert the error into a dictionary. 55 | 56 | Returns: 57 | :class:`dict` 58 | """ 59 | return self._raw_error 60 | 61 | 62 | def generate_default_error_message(r): 63 | return { 64 | "message": "JSON could not be generated", 65 | "code": r.status_code, 66 | "hint": "Refer to full message for details", 67 | "details": str(r.content), 68 | } 69 | -------------------------------------------------------------------------------- /postgrest/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/supabase/postgrest-py/3547d62831b1d3a04adf3a2f80ba991840e5270d/postgrest/py.typed -------------------------------------------------------------------------------- /postgrest/types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | 5 | if sys.version_info >= (3, 11): 6 | from enum import StrEnum 7 | else: 8 | from strenum import StrEnum 9 | 10 | 11 | class CountMethod(StrEnum): 12 | exact = "exact" 13 | planned = "planned" 14 | estimated = "estimated" 15 | 16 | 17 | class Filters(StrEnum): 18 | NOT = "not" 19 | EQ = "eq" 20 | NEQ = "neq" 21 | GT = "gt" 22 | GTE = "gte" 23 | LT = "lt" 24 | LTE = "lte" 25 | IS = "is" 26 | LIKE = "like" 27 | LIKE_ALL = "like(all)" 28 | LIKE_ANY = "like(any)" 29 | ILIKE = "ilike" 30 | ILIKE_ALL = "ilike(all)" 31 | ILIKE_ANY = "ilike(any)" 32 | FTS = "fts" 33 | PLFTS = "plfts" 34 | PHFTS = "phfts" 35 | WFTS = "wfts" 36 | IN = "in" 37 | CS = "cs" 38 | CD = "cd" 39 | OV = "ov" 40 | SL = "sl" 41 | SR = "sr" 42 | NXL = "nxl" 43 | NXR = "nxr" 44 | ADJ = "adj" 45 | 46 | 47 | class RequestMethod(StrEnum): 48 | GET = "GET" 49 | POST = "POST" 50 | PATCH = "PATCH" 51 | PUT = "PUT" 52 | DELETE = "DELETE" 53 | HEAD = "HEAD" 54 | 55 | 56 | class ReturnMethod(StrEnum): 57 | minimal = "minimal" 58 | representation = "representation" 59 | 
# --- postgrest/utils.py ---
from __future__ import annotations

import re
from typing import Any, Type, TypeVar, cast, get_origin
from urllib.parse import urlparse

from httpx import AsyncClient  # noqa: F401
from httpx import Client as BaseClient  # noqa: F401

# Matches one unpadded base64url-encoded segment of a JWT.
BASE64URL_REGEX = r"^([a-z0-9_-]{4})*($|[a-z0-9_-]{3}$|[a-z0-9_-]{2}$)$"


class SyncClient(BaseClient):
    """httpx.Client with an aclose() alias so sync and async clients share a shutdown API."""

    def aclose(self) -> None:
        self.close()


def sanitize_param(param: Any) -> str:
    """Stringify ``param``, quoting it when it contains PostgREST-reserved characters."""
    param_str = str(param)
    reserved_chars = ",:()"
    if any(char in param_str for char in reserved_chars):
        return f'"{param_str}"'
    return param_str


def sanitize_pattern_param(pattern: str) -> str:
    """Translate SQL-style ``%`` wildcards to ``*`` and quote reserved characters."""
    return sanitize_param(pattern.replace("%", "*"))


_T = TypeVar("_T")


def get_origin_and_cast(typ: type[type[_T]]) -> type[_T]:
    # Base[T] is an instance of typing._GenericAlias, so doing Base[T].__init__
    # tries to call _GenericAlias.__init__ - which is the wrong method
    # get_origin(Base[T]) returns Base
    # This function casts Base back to Base[T] to maintain type-safety
    # while still allowing us to access the methods of `Base` at runtime
    # See: definitions of request builders that use multiple-inheritance
    # like AsyncFilterRequestBuilder
    return cast(Type[_T], get_origin(typ))


def is_http_url(url: str) -> bool:
    """Return True when ``url`` carries an http or https scheme."""
    return urlparse(url).scheme in {"https", "http"}


def is_valid_jwt(value: str) -> bool:
    """Checks if value looks like a JWT, does not do any extra parsing."""
    if not isinstance(value, str):
        return False

    # Remove trailing whitespaces if any.
    value = value.strip()

    # Remove "Bearer " prefix if any.
    if value.startswith("Bearer "):
        value = value[7:]

    # Valid JWT must have 2 dots (Header.Payload.Signature)
    if value.count(".") != 2:
        return False

    for part in value.split("."):
        if not re.search(BASE64URL_REGEX, part, re.IGNORECASE):
            return False

    return True

# --- postgrest/version.py ---
__version__ = "1.0.2"  # {x-release-please-version}

# --- pyproject.toml (continues in the next section of this dump) ---
[tool.poetry]
name = "postgrest"
version = "1.0.2" # {x-release-please-version}
description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
authors = ["Lương Quang Mạnh ", "Joel Lee ", "Anand", "Oliver Rice", "Andrew Smith "]
homepage = "https://github.com/supabase/postgrest-py"
repository = "https://github.com/supabase/postgrest-py"
documentation = "https://postgrest-py.rtfd.io"
readme = "README.md"
license = "MIT"
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent"
]
packages = [
    { include = "postgrest" },
]

[tool.poetry.dependencies]
python = "^3.9"
httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
deprecation = "^2.1.0"
pydantic = ">=1.9,<3.0"
strenum = {version = "^0.4.9", python = "<3.11"}

[tool.poetry.dev-dependencies]
pytest = "^8.3.5"
flake8 = "^7.2.0"
black = "^25.1"
isort = "^6.0.1"
pre-commit = "^4.2.0"
pytest-cov = "^6.1.1"
pytest-depends = "^1.0.1"
pytest-asyncio = "^0.26.0"
unasync-cli = { git =
"https://github.com/supabase-community/unasync-cli.git", branch = "main" } 37 | 38 | [tool.poetry.group.docs] 39 | optional = true 40 | 41 | [tool.poetry.group.docs.dependencies] 42 | sphinx = "^7.1.2" 43 | furo = ">=2023.9.10,<2025.0.0" 44 | 45 | [tool.pytest.ini_options] 46 | asyncio_mode = "auto" 47 | 48 | [build-system] 49 | requires = ["poetry-core>=1.0.0"] 50 | build-backend = "poetry.core.masonry.api" 51 | -------------------------------------------------------------------------------- /release-please-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "last-release-sha": "9c1268e8011cf141f85cf2bbac682f86acfd06fa", 3 | "packages": { 4 | ".": { 5 | "changelog-path": "CHANGELOG.md", 6 | "release-type": "python" 7 | } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/supabase/postgrest-py/3547d62831b1d3a04adf3a2f80ba991840e5270d/tests/__init__.py -------------------------------------------------------------------------------- /tests/_async/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/supabase/postgrest-py/3547d62831b1d3a04adf3a2f80ba991840e5270d/tests/_async/__init__.py -------------------------------------------------------------------------------- /tests/_async/client.py: -------------------------------------------------------------------------------- 1 | from postgrest import AsyncPostgrestClient 2 | 3 | REST_URL = "http://127.0.0.1:3000" 4 | 5 | 6 | def rest_client(): 7 | return AsyncPostgrestClient( 8 | base_url=REST_URL, 9 | ) 10 | -------------------------------------------------------------------------------- /tests/_async/test_client.py: -------------------------------------------------------------------------------- 1 | from unittest.mock 
import patch

import pytest
from httpx import BasicAuth, Headers, Request, Response

from postgrest import AsyncPostgrestClient
from postgrest.exceptions import APIError


@pytest.fixture
async def postgrest_client():
    """Yield a client bound to a dummy base URL; context manager closes the session."""
    async with AsyncPostgrestClient("https://example.com") as client:
        yield client


class TestConstructor:
    def test_simple(self, postgrest_client: AsyncPostgrestClient):
        """Default construction targets the `public` schema with JSON headers."""
        session = postgrest_client.session

        assert session.base_url == "https://example.com"
        headers = Headers(
            {
                "Accept": "application/json",
                "Content-Type": "application/json",
                "Accept-Profile": "public",
                "Content-Profile": "public",
            }
        )
        # Default headers must be a subset of the session headers.
        assert session.headers.items() >= headers.items()

    @pytest.mark.asyncio
    async def test_custom_headers(self):
        """Custom schema and extra headers are merged into the session headers."""
        async with AsyncPostgrestClient(
            "https://example.com", schema="pub", headers={"Custom-Header": "value"}
        ) as client:
            session = client.session

            assert session.base_url == "https://example.com"
            headers = Headers(
                {
                    "Accept-Profile": "pub",
                    "Content-Profile": "pub",
                    "Custom-Header": "value",
                }
            )
            assert session.headers.items() >= headers.items()


class TestAuth:
    def test_auth_token(self, postgrest_client: AsyncPostgrestClient):
        """Bearer-token auth sets the Authorization header."""
        postgrest_client.auth("s3cr3t")
        session = postgrest_client.session

        assert session.headers["Authorization"] == "Bearer s3cr3t"

    def test_auth_basic(self, postgrest_client: AsyncPostgrestClient):
        """Username/password auth installs httpx BasicAuth on the session."""
        postgrest_client.auth(None, username="admin", password="s3cr3t")
        session = postgrest_client.session

        assert isinstance(session.auth, BasicAuth)
        assert session.auth._auth_header == BasicAuth("admin", "s3cr3t")._auth_header


def test_schema(postgrest_client: AsyncPostgrestClient):
    """schema() returns a client whose profile headers name the new schema."""
    client = postgrest_client.schema("private")
    session = client.session
    subheaders = {
        "accept-profile": "private",
        "content-profile": "private",
    }

    assert subheaders.items() < dict(session.headers).items()


@pytest.mark.asyncio
async def test_params_purged_after_execute(postgrest_client: AsyncPostgrestClient):
    """Query params must not leak into the shared session after execute()."""
    assert len(postgrest_client.session.params) == 0
    with pytest.raises(APIError):
        await postgrest_client.from_("test").select("a", "b").eq("c", "d").execute()
    assert len(postgrest_client.session.params) == 0


@pytest.mark.asyncio
async def test_response_status_code_outside_ok(postgrest_client: AsyncPostgrestClient):
    """APIError payloads (including nested `errors`) survive round-trip via .json()."""
    with patch(
        "postgrest._async.request_builder.AsyncSelectRequestBuilder.execute",
        side_effect=APIError(
            {
                "message": "mock error",
                "code": "400",
                "hint": "mock",
                "details": "mock",
                "errors": [{"code": 400}],
            }
        ),
    ):
        with pytest.raises(APIError) as exc_info:
            await postgrest_client.from_("test").select("a", "b").eq(
                "c", "d"
            ).execute()  # gives status_code = 400
        exc_response = exc_info.value.json()
        assert not exc_response.get("success")
        assert isinstance(exc_response.get("errors"), list)
        assert (
            isinstance(exc_response["errors"][0], dict)
            and "code" in exc_response["errors"][0]
        )
        assert exc_response["errors"][0].get("code") == 400


@pytest.mark.asyncio
async def test_response_maybe_single(postgrest_client: AsyncPostgrestClient):
    """maybe_single() requests the pgrst object media type and surfaces APIError."""
    with patch(
        "postgrest._async.request_builder.AsyncSingleRequestBuilder.execute",
        side_effect=APIError(
            {"message": "mock error", "code": "400", "hint": "mock", "details": "mock"}
        ),
    ):
        client = (
            postgrest_client.from_("test").select("a", "b").eq("c", "d").maybe_single()
        )
        assert "Accept" in client.headers
        assert client.headers.get("Accept") == "application/vnd.pgrst.object+json"
        with pytest.raises(APIError) as exc_info:
            await client.execute()
        assert isinstance(exc_info, pytest.ExceptionInfo)
        exc_response = exc_info.value.json()
        assert isinstance(exc_response.get("message"), str)
        # maybe_single() maps the "no rows" failure to a 204 code.
        assert "code" in exc_response and int(exc_response["code"]) == 204


# https://github.com/supabase/postgrest-py/issues/595
@pytest.mark.asyncio
async def test_response_client_invalid_response_but_valid_json(
    postgrest_client: AsyncPostgrestClient,
):
    """A non-dict JSON body from an upstream 502 is wrapped in a synthetic APIError."""
    with patch(
        "httpx._client.AsyncClient.request",
        return_value=Response(
            status_code=502,
            text='"gateway error: Error: Network connection lost."',  # quotes makes this text a valid non-dict JSON object
            request=Request(method="GET", url="http://example.com"),
        ),
    ):
        client = postgrest_client.from_("test").select("a", "b").eq("c", "d").single()
        assert "Accept" in client.headers
        assert client.headers.get("Accept") == "application/vnd.pgrst.object+json"
        with pytest.raises(APIError) as exc_info:
            await client.execute()
        assert isinstance(exc_info, pytest.ExceptionInfo)
        exc_response = exc_info.value.json()
        assert isinstance(exc_response.get("message"), str)
        assert exc_response.get("message") == "JSON could not be generated"
        assert "code" in exc_response and int(exc_response["code"]) == 502


# -------------------- tests/_async/test_filter_request_builder.py --------------------
import pytest
from httpx import Headers, QueryParams

from postgrest import AsyncFilterRequestBuilder
from postgrest.utils import AsyncClient


@pytest.fixture
async def filter_request_builder():
    """Yield a bare GET filter builder against /example_table (no headers/params)."""
    async with AsyncClient() as client:
        yield AsyncFilterRequestBuilder(
            client, "/example_table", "GET", Headers(), QueryParams(), {}
        )


def test_constructor(filter_request_builder: AsyncFilterRequestBuilder):
    builder = filter_request_builder

    assert builder.path == "/example_table"
    assert len(builder.headers) == 0
    assert len(builder.params) == 0
    assert builder.http_method == "GET"
    # Fixed: identity comparison with None must use `is`, not `==` (PEP 8 / E711).
    assert builder.json is None
    assert not builder.negate_next


def test_not_(filter_request_builder):
    builder = filter_request_builder.not_

    assert builder.negate_next


def test_filter(filter_request_builder):
    # A column name containing reserved characters gets quoted in the param key.
    builder = filter_request_builder.filter(":col.name", "eq", "val")

    assert builder.params['":col.name"'] == "eq.val"


@pytest.mark.parametrize(
    "col_name, expected_query_prefix",
    [
        ("col:name", "%22col%3Aname%22"),
        ("col.name", "col.name"),
    ],
)
def test_filter_special_characters(
    filter_request_builder, col_name, expected_query_prefix
):
    builder = filter_request_builder.filter(col_name, "eq", "val")

    assert str(builder.params) == f"{expected_query_prefix}=eq.val"


def test_multivalued_param(filter_request_builder):
    # The same column may appear twice with different operators.
    builder = filter_request_builder.lte("x", "a").gte("x", "b")

    assert str(builder.params) == "x=lte.a&x=gte.b"


def test_match(filter_request_builder):
    builder = filter_request_builder.match({"id": "1", "done": "false"})
    assert str(builder.params) == "id=eq.1&done=eq.false"


def test_equals(filter_request_builder):
    builder = filter_request_builder.eq("x", "a")

    assert str(builder.params) == "x=eq.a"


def test_not_equal(filter_request_builder):
    builder = filter_request_builder.neq("x", "a")

    assert str(builder.params) == "x=neq.a"


def test_greater_than(filter_request_builder):
    builder = filter_request_builder.gt("x", "a")

    assert str(builder.params) == "x=gt.a"
def test_greater_than_or_equals_to(filter_request_builder):
    builder = filter_request_builder.gte("x", "a")

    assert str(builder.params) == "x=gte.a"


def test_contains(filter_request_builder):
    builder = filter_request_builder.contains("x", "a")

    assert str(builder.params) == "x=cs.a"


def test_contains_dictionary(filter_request_builder):
    builder = filter_request_builder.contains("x", {"a": "b"})

    # {"a":"b"}
    assert str(builder.params) == "x=cs.%7B%22a%22%3A+%22b%22%7D"


def test_contains_any_item(filter_request_builder):
    builder = filter_request_builder.contains("x", ["a", "b"])

    # {a,b}
    assert str(builder.params) == "x=cs.%7Ba%2Cb%7D"


def test_contains_in_list(filter_request_builder):
    builder = filter_request_builder.contains("x", '[{"a": "b"}]')

    # [{"a":+"b"}] (the + represents the space)
    assert str(builder.params) == "x=cs.%5B%7B%22a%22%3A+%22b%22%7D%5D"


def test_contained_by_mixed_items(filter_request_builder):
    builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]'])

    # {a,["b",+"c"]}
    assert str(builder.params) == "x=cd.%7Ba%2C%5B%22b%22%2C+%22c%22%5D%7D"


def test_range_greater_than(filter_request_builder):
    builder = filter_request_builder.range_gt(
        "x", ["2000-01-02 08:30", "2000-01-02 09:30"]
    )

    # sr.(2000-01-02 08:30,2000-01-02 09:30), URL-encoded
    assert str(builder.params) == "x=sr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29"


def test_range_greater_than_or_equal_to(filter_request_builder):
    builder = filter_request_builder.range_gte(
        "x", ["2000-01-02 08:30", "2000-01-02 09:30"]
    )

    # nxl.(2000-01-02 08:30,2000-01-02 09:30), URL-encoded
    assert str(builder.params) == "x=nxl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29"


def test_range_less_than(filter_request_builder):
    builder = filter_request_builder.range_lt(
        "x", ["2000-01-02 08:30", "2000-01-02 09:30"]
    )

    # sl.(2000-01-02 08:30,2000-01-02 09:30), URL-encoded
    assert str(builder.params) == "x=sl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29"


def test_range_less_than_or_equal_to(filter_request_builder):
    builder = filter_request_builder.range_lte(
        "x", ["2000-01-02 08:30", "2000-01-02 09:30"]
    )

    # nxr.(2000-01-02 08:30,2000-01-02 09:30), URL-encoded
    assert str(builder.params) == "x=nxr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29"


def test_range_adjacent(filter_request_builder):
    builder = filter_request_builder.range_adjacent(
        "x", ["2000-01-02 08:30", "2000-01-02 09:30"]
    )

    # adj.(2000-01-02 08:30,2000-01-02 09:30), URL-encoded
    assert str(builder.params) == "x=adj.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29"


def test_overlaps(filter_request_builder):
    builder = filter_request_builder.overlaps("x", ["is:closed", "severity:high"])

    # ov.{is:closed,severity:high}, URL-encoded
    assert str(builder.params) == "x=ov.%7Bis%3Aclosed%2Cseverity%3Ahigh%7D"


def test_overlaps_with_timestamp_range(filter_request_builder):
    # A string value is passed through as a literal range instead of a set.
    builder = filter_request_builder.overlaps("x", "[2000-01-01 12:45, 2000-01-01 13:15)")

    # ov.[2000-01-01 12:45, 2000-01-01 13:15), URL-encoded
    assert str(builder.params) == "x=ov.%5B2000-01-01+12%3A45%2C+2000-01-01+13%3A15%29"


def test_like(filter_request_builder):
    builder = filter_request_builder.like("x", "%a%")

    assert str(builder.params) == "x=like.%25a%25"


def test_ilike(filter_request_builder):
    builder = filter_request_builder.ilike("x", "%a%")

    assert str(builder.params) == "x=ilike.%25a%25"


def test_like_all_of(filter_request_builder):
    builder = filter_request_builder.like_all_of("x", "A*,*b")

    assert str(builder.params) == "x=like%28all%29.%7BA%2A%2C%2Ab%7D"


def test_like_any_of(filter_request_builder):
    builder = filter_request_builder.like_any_of("x", "a*,*b")

    assert str(builder.params) == "x=like%28any%29.%7Ba%2A%2C%2Ab%7D"


def test_ilike_all_of(filter_request_builder):
    builder = filter_request_builder.ilike_all_of("x", "A*,*b")

    assert str(builder.params) == "x=ilike%28all%29.%7BA%2A%2C%2Ab%7D"


def test_ilike_any_of(filter_request_builder):
    builder = filter_request_builder.ilike_any_of("x", "A*,*b")

    assert str(builder.params) == "x=ilike%28any%29.%7BA%2A%2C%2Ab%7D"


def test_is_(filter_request_builder):
    builder = filter_request_builder.is_("x", "a")

    assert str(builder.params) == "x=is.a"


def test_in_(filter_request_builder):
    builder = filter_request_builder.in_("x", ["a", "b"])

    assert str(builder.params) == "x=in.%28a%2Cb%29"


def test_or_(filter_request_builder):
    builder = filter_request_builder.or_("x.eq.1")

    assert str(builder.params) == "or=%28x.eq.1%29"


def test_or_in_contain(filter_request_builder):
    builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}")

    assert (
        str(builder.params)
        == "or=%28id.in.%285%2C6%2C7%29%2C+arraycol.cs.%7B%27a%27%2C%27b%27%7D%29"
    )


# ---------- tests/_async/test_filter_request_builder_integration.py ----------
# Integration tests below run against a live PostgREST instance (see infra/).
from .client import rest_client


async def test_multivalued_param():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso", count="exact")
        .lte("numcode", 8)
        .gte("numcode", 4)
        .execute()
    )

    assert res.count == 2
    assert res.data == [
        {"country_name": "AFGHANISTAN", "iso": "AF"},
        {"country_name": "ALBANIA", "iso": "AL"},
    ]
# --- equality-style filters against the live countries/users fixtures ---


async def test_match():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .match({"numcode": 8, "nicename": "Albania"})
        .single()
        .execute()
    )

    assert res.data == {"country_name": "ALBANIA", "iso": "AL"}


async def test_equals():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .eq("nicename", "Albania")
        .single()
        .execute()
    )

    assert res.data == {"country_name": "ALBANIA", "iso": "AL"}


async def test_not_equal():
    res = (
        await rest_client()
        .from_("users")
        .select("id, name")
        .neq("name", "Jane")
        .single()
        .execute()
    )

    assert res.data == {"id": 1, "name": "Michael"}


async def test_greater_than():
    res = (
        await rest_client()
        .from_("users")
        .select("id, name")
        .gt("id", 1)
        .single()
        .execute()
    )

    assert res.data == {"id": 2, "name": "Jane"}


async def test_greater_than_or_equals_to():
    res = await rest_client().from_("users").select("id, name").gte("id", 1).execute()

    assert res.data == [{"id": 1, "name": "Michael"}, {"id": 2, "name": "Jane"}]


# --- containment (cs/cd) and range operators ---


async def test_contains_dictionary():
    res = (
        await rest_client()
        .from_("users")
        .select("name")
        .contains("address", {"postcode": 90210})
        .single()
        .execute()
    )

    assert res.data == {"name": "Michael"}


async def test_contains_any_item():
    res = (
        await rest_client()
        .from_("issues")
        .select("title")
        .contains("tags", ["is:open", "priority:low"])
        .execute()
    )

    assert res.data == [{"title": "Cache invalidation is not working"}]


async def test_contains_on_range():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .contains("during", "[2000-01-01 13:00, 2000-01-01 13:30)")
        .execute()
    )

    assert res.data == [{"id": 1, "room_name": "Emerald"}]


async def test_contained_by_mixed_items():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .contained_by("during", "[2000-01-01 00:00, 2000-01-01 23:59)")
        .execute()
    )

    assert res.data == [{"id": 1, "room_name": "Emerald"}]


async def test_range_greater_than():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .range_gt("during", ["2000-01-02 08:00", "2000-01-02 09:00"])
        .execute()
    )

    assert res.data == [{"id": 2, "room_name": "Topaz"}]


async def test_range_greater_than_or_equal_to():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .range_gte("during", ["2000-01-02 08:30", "2000-01-02 09:30"])
        .execute()
    )

    assert res.data == [{"id": 2, "room_name": "Topaz"}]


async def test_range_less_than():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .range_lt("during", ["2000-01-01 15:00", "2000-01-02 16:00"])
        .execute()
    )

    assert res.data == [{"id": 1, "room_name": "Emerald"}]


async def test_range_less_than_or_equal_to():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .range_lte("during", ["2000-01-01 14:00", "2000-01-01 16:00"])
        .execute()
    )

    assert res.data == [{"id": 1, "room_name": "Emerald"}]


async def test_range_adjacent():
    res = (
        await rest_client()
        .from_("reservations")
        .select("id, room_name")
        .range_adjacent("during", ["2000-01-01 12:00", "2000-01-01 13:00"])
        .execute()
    )

    assert res.data == [{"id": 1, "room_name": "Emerald"}]


async def test_overlaps():
    res = (
        await rest_client()
        .from_("issues")
        .select("title")
        .overlaps("tags", ["is:closed", "severity:high"])
        .execute()
    )

    assert res.data == [
        {"title": "Cache invalidation is not working"},
        {"title": "Add alias to filters"},
    ]


async def test_overlaps_with_timestamp_range():
    res = (
        await rest_client()
        .from_("reservations")
        .select("room_name")
        .overlaps("during", "[2000-01-01 12:45, 2000-01-01 13:15)")
        .execute()
    )

    assert res.data == [
        {"room_name": "Emerald"},
    ]


# --- pattern matching (like/ilike and their all/any variants) ---


async def test_like():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .like("nicename", "%Alba%")
        .execute()
    )

    assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}]


async def test_ilike():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .ilike("nicename", "%alban%")
        .execute()
    )

    assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}]


async def test_like_all_of():
    res = (
        await rest_client()
        .from_("countries")
        .select("nicename, iso")
        .like_all_of("nicename", "A*,*n")
        .execute()
    )

    assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}]


async def test_like_any_of():
    res = (
        await rest_client()
        .from_("countries")
        .select("nicename, iso")
        .like_any_of("nicename", "Al*,*ia")
        .execute()
    )

    assert res.data == [
        {"iso": "AL", "nicename": "Albania"},
        {"iso": "DZ", "nicename": "Algeria"},
    ]


async def test_ilike_all_of():
    res = (
        await rest_client()
        .from_("countries")
        .select("nicename, iso")
        .ilike_all_of("nicename", "a*,*n")
        .execute()
    )

    assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}]


async def test_ilike_any_of():
    res = (
        await rest_client()
        .from_("countries")
        .select("nicename, iso")
        .ilike_any_of("nicename", "al*,*ia")
        .execute()
    )

    assert res.data == [
        {"iso": "AL", "nicename": "Albania"},
        {"iso": "DZ", "nicename": "Algeria"},
    ]


# --- is/in/or combinators, including negation and embedded resources ---


async def test_is_():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .is_("numcode", "null")
        .limit(1)
        .order("nicename")
        .execute()
    )

    assert res.data == [{"country_name": "ANTARCTICA", "iso": "AQ"}]


async def test_is_not():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .not_.is_("numcode", "null")
        .limit(1)
        .order("nicename")
        .execute()
    )

    assert res.data == [{"country_name": "AFGHANISTAN", "iso": "AF"}]


async def test_in_():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .in_("nicename", ["Albania", "Algeria"])
        .execute()
    )

    assert res.data == [
        {"country_name": "ALBANIA", "iso": "AL"},
        {"country_name": "ALGERIA", "iso": "DZ"},
    ]


async def test_or_():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .or_("iso.eq.DZ,nicename.eq.Albania")
        .execute()
    )

    assert res.data == [
        {"country_name": "ALBANIA", "iso": "AL"},
        {"country_name": "ALGERIA", "iso": "DZ"},
    ]


async def test_or_with_and():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .or_("phonecode.gt.506,and(iso.eq.AL,nicename.eq.Albania)")
        .execute()
    )

    assert res.data == [
        {"country_name": "ALBANIA", "iso": "AL"},
        {"country_name": "TRINIDAD AND TOBAGO", "iso": "TT"},
    ]


async def test_or_in():
    res = (
        await rest_client()
        .from_("issues")
        .select("id, title")
        .or_("id.in.(1,4),tags.cs.{is:open,priority:high}")
        .execute()
    )

    assert res.data == [
        {"id": 1, "title": "Cache invalidation is not working"},
        {"id": 3, "title": "Add missing postgrest filters"},
        {"id": 4, "title": "Add alias to filters"},
    ]


async def test_or_on_reference_table():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, cities!inner(name)")
        .or_("country_id.eq.10,name.eq.Paris", reference_table="cities")
        .execute()
    )

    assert res.data == [
        {
            "country_name": "UNITED KINGDOM",
            "cities": [
                {"name": "London"},
                {"name": "Manchester"},
                {"name": "Liverpool"},
                {"name": "Bristol"},
            ],
        },
    ]


# --- output format modifiers (explain/csv) and RPC calls ---


async def test_explain_json():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, cities!inner(name)")
        .or_("country_id.eq.10,name.eq.Paris", reference_table="cities")
        .explain(format="json", analyze=True)
        .execute()
    )
    assert res.data[0]["Plan"]["Node Type"] == "Aggregate"


async def test_csv():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, iso")
        .in_("nicename", ["Albania", "Algeria"])
        .csv()
        .execute()
    )
    assert "ALBANIA,AL\nALGERIA,DZ" in res.data


async def test_explain_text():
    res = (
        await rest_client()
        .from_("countries")
        .select("country_name, cities!inner(name)")
        .or_("country_id.eq.10,name.eq.Paris", reference_table="cities")
        .explain(analyze=True, verbose=True, settings=True, buffers=True, wal=True)
        .execute()
    )
    # NOTE(review): membership is checked on `res` itself, not `res.data` —
    # presumably execute() returns the raw explain text for non-JSON format; confirm.
    assert (
        "((cities_1.country_id = countries.id) AND ((cities_1.country_id = '10'::bigint) OR (cities_1.name = 'Paris'::text)))"
        in res
    )


async def test_rpc_with_single():
    res = (
        await rest_client()
        .rpc("list_stored_countries", {})
        .select("nicename, country_name, iso")
        .eq("nicename", "Albania")
        .single()
        .execute()
    )

    assert res.data == {"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}


async def test_rpc_with_limit():
    res = (
        await rest_client()
        .rpc("list_stored_countries", {})
        .select("nicename, country_name, iso")
        .eq("nicename", "Albania")
        .limit(1)
        .execute()
    )

    assert res.data == [{"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}]


async def test_rpc_with_range():
    res = (
        await rest_client()
        .rpc("list_stored_countries", {})
        .select("nicename, iso")
        .range(1, 2)
        .execute()
    )

    assert res.data == [
        {"nicename": "Albania", "iso": "AL"},
        {"nicename": "Algeria", "iso": "DZ"},
    ]


async def test_rpc_post_with_args():
    res = (
        await rest_client()
        .rpc("search_countries_by_name", {"search_name": "Alban"})
        .select("nicename, iso")
        .execute()
    )
    assert res.data == [{"nicename": "Albania", "iso": "AL"}]


async def test_rpc_get_with_args():
    res = (
        await rest_client()
        .rpc("search_countries_by_name", {"search_name": "Alger"}, get=True)
        .select("nicename, iso")
        .execute()
    )
    assert res.data == [{"nicename": "Algeria", "iso": "DZ"}]
async def test_rpc_get_with_count(): 507 | res = ( 508 | await rest_client() 509 | .rpc("search_countries_by_name", {"search_name": "Al"}, get=True, count="exact") 510 | .select("nicename") 511 | .execute() 512 | ) 513 | assert res.count == 2 514 | assert res.data == [{"nicename": "Albania"}, {"nicename": "Algeria"}] 515 | 516 | 517 | async def test_rpc_head_count(): 518 | res = ( 519 | await rest_client() 520 | .rpc("search_countries_by_name", {"search_name": "Al"}, head=True, count="exact") 521 | .execute() 522 | ) 523 | 524 | assert res.count == 2 525 | assert res.data == [] 526 | 527 | 528 | async def test_order(): 529 | res = ( 530 | await rest_client() 531 | .from_("countries") 532 | .select("country_name, iso") 533 | .limit(3) 534 | .order("nicename", desc=True) 535 | .execute() 536 | ) 537 | 538 | assert res.data == [ 539 | {"country_name": "ZIMBABWE", "iso": "ZW"}, 540 | {"country_name": "UNITED STATES", "iso": "US"}, 541 | {"country_name": "UNITED KINGDOM", "iso": "GB"}, 542 | ] 543 | 544 | 545 | async def test_order_on_foreign_table(): 546 | res = ( 547 | await rest_client() 548 | .from_("orchestral_sections") 549 | .select("name, instruments(name)") 550 | .order("name", desc=True, foreign_table="instruments") 551 | .execute() 552 | ) 553 | 554 | assert res.data == [ 555 | {"name": "strings", "instruments": [{"name": "violin"}, {"name": "harp"}]}, 556 | {"name": "woodwinds", "instruments": []}, 557 | ] 558 | -------------------------------------------------------------------------------- /tests/_async/test_query_request_builder.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from httpx import Headers, QueryParams 3 | 4 | from postgrest import AsyncQueryRequestBuilder 5 | from postgrest.utils import AsyncClient 6 | 7 | 8 | @pytest.fixture 9 | async def query_request_builder(): 10 | async with AsyncClient() as client: 11 | yield AsyncQueryRequestBuilder( 12 | client, "/example_table", "GET", 
Headers(), QueryParams(), {} 13 | ) 14 | 15 | 16 | def test_constructor(query_request_builder: AsyncQueryRequestBuilder): 17 | builder = query_request_builder 18 | 19 | assert builder.path == "/example_table" 20 | assert len(builder.headers) == 0 21 | assert len(builder.params) == 0 22 | assert builder.http_method == "GET" 23 | assert builder.json == None 24 | -------------------------------------------------------------------------------- /tests/_async/test_request_builder.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List 2 | 3 | import pytest 4 | from httpx import Request, Response 5 | 6 | from postgrest import AsyncRequestBuilder, AsyncSingleRequestBuilder 7 | from postgrest.base_request_builder import APIResponse, SingleAPIResponse 8 | from postgrest.types import CountMethod 9 | from postgrest.utils import AsyncClient 10 | 11 | 12 | @pytest.fixture 13 | async def request_builder(): 14 | async with AsyncClient() as client: 15 | yield AsyncRequestBuilder(client, "/example_table") 16 | 17 | 18 | def test_constructor(request_builder): 19 | assert request_builder.path == "/example_table" 20 | 21 | 22 | class TestSelect: 23 | def test_select(self, request_builder: AsyncRequestBuilder): 24 | builder = request_builder.select("col1", "col2") 25 | 26 | assert builder.params["select"] == "col1,col2" 27 | assert builder.headers.get("prefer") is None 28 | assert builder.http_method == "GET" 29 | assert builder.json == None 30 | 31 | def test_select_with_count(self, request_builder: AsyncRequestBuilder): 32 | builder = request_builder.select(count=CountMethod.exact) 33 | 34 | assert builder.params["select"] == "*" 35 | assert builder.headers["prefer"] == "count=exact" 36 | assert builder.http_method == "GET" 37 | assert builder.json == None 38 | 39 | def test_select_with_head(self, request_builder: AsyncRequestBuilder): 40 | builder = request_builder.select("col1", "col2", head=True) 41 | 42 | assert 
builder.params.get("select") == "col1,col2" 43 | assert builder.headers.get("prefer") is None 44 | assert builder.http_method == "HEAD" 45 | assert builder.json == None 46 | 47 | def test_select_as_csv(self, request_builder: AsyncRequestBuilder): 48 | builder = request_builder.select("*").csv() 49 | 50 | assert builder.headers["Accept"] == "text/csv" 51 | assert isinstance(builder, AsyncSingleRequestBuilder) 52 | 53 | 54 | class TestInsert: 55 | def test_insert(self, request_builder: AsyncRequestBuilder): 56 | builder = request_builder.insert({"key1": "val1"}) 57 | 58 | assert builder.headers.get_list("prefer", True) == ["return=representation"] 59 | assert builder.http_method == "POST" 60 | assert builder.json == {"key1": "val1"} 61 | 62 | def test_insert_with_count(self, request_builder: AsyncRequestBuilder): 63 | builder = request_builder.insert({"key1": "val1"}, count=CountMethod.exact) 64 | 65 | assert builder.headers.get_list("prefer", True) == [ 66 | "return=representation", 67 | "count=exact", 68 | ] 69 | assert builder.http_method == "POST" 70 | assert builder.json == {"key1": "val1"} 71 | 72 | def test_insert_with_upsert(self, request_builder: AsyncRequestBuilder): 73 | builder = request_builder.insert({"key1": "val1"}, upsert=True) 74 | 75 | assert builder.headers.get_list("prefer", True) == [ 76 | "return=representation", 77 | "resolution=merge-duplicates", 78 | ] 79 | assert builder.http_method == "POST" 80 | assert builder.json == {"key1": "val1"} 81 | 82 | def test_upsert_with_default_single(self, request_builder: AsyncRequestBuilder): 83 | builder = request_builder.upsert([{"key1": "val1"}], default_to_null=False) 84 | assert builder.headers.get_list("prefer", True) == [ 85 | "return=representation", 86 | "resolution=merge-duplicates", 87 | "missing=default", 88 | ] 89 | assert builder.http_method == "POST" 90 | assert builder.json == [{"key1": "val1"}] 91 | assert builder.params.get("columns") == '"key1"' 92 | 93 | def 
test_bulk_insert_using_default(self, request_builder: AsyncRequestBuilder): 94 | builder = request_builder.insert( 95 | [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False 96 | ) 97 | assert builder.headers.get_list("prefer", True) == [ 98 | "return=representation", 99 | "missing=default", 100 | ] 101 | assert builder.http_method == "POST" 102 | assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}] 103 | assert set(builder.params["columns"].split(",")) == set( 104 | '"key1","key2","key3"'.split(",") 105 | ) 106 | 107 | def test_upsert(self, request_builder: AsyncRequestBuilder): 108 | builder = request_builder.upsert({"key1": "val1"}) 109 | 110 | assert builder.headers.get_list("prefer", True) == [ 111 | "return=representation", 112 | "resolution=merge-duplicates", 113 | ] 114 | assert builder.http_method == "POST" 115 | assert builder.json == {"key1": "val1"} 116 | 117 | def test_bulk_upsert_with_default(self, request_builder: AsyncRequestBuilder): 118 | builder = request_builder.upsert( 119 | [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False 120 | ) 121 | assert builder.headers.get_list("prefer", True) == [ 122 | "return=representation", 123 | "resolution=merge-duplicates", 124 | "missing=default", 125 | ] 126 | assert builder.http_method == "POST" 127 | assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}] 128 | assert set(builder.params["columns"].split(",")) == set( 129 | '"key1","key2","key3"'.split(",") 130 | ) 131 | 132 | 133 | class TestUpdate: 134 | def test_update(self, request_builder: AsyncRequestBuilder): 135 | builder = request_builder.update({"key1": "val1"}) 136 | 137 | assert builder.headers.get_list("prefer", True) == ["return=representation"] 138 | assert builder.http_method == "PATCH" 139 | assert builder.json == {"key1": "val1"} 140 | 141 | def test_update_with_count(self, request_builder: AsyncRequestBuilder): 142 | builder = 
request_builder.update({"key1": "val1"}, count=CountMethod.exact) 143 | 144 | assert builder.headers.get_list("prefer", True) == [ 145 | "return=representation", 146 | "count=exact", 147 | ] 148 | assert builder.http_method == "PATCH" 149 | assert builder.json == {"key1": "val1"} 150 | 151 | 152 | class TestDelete: 153 | def test_delete(self, request_builder: AsyncRequestBuilder): 154 | builder = request_builder.delete() 155 | 156 | assert builder.headers.get_list("prefer", True) == ["return=representation"] 157 | assert builder.http_method == "DELETE" 158 | assert builder.json == {} 159 | 160 | def test_delete_with_count(self, request_builder: AsyncRequestBuilder): 161 | builder = request_builder.delete(count=CountMethod.exact) 162 | 163 | assert builder.headers.get_list("prefer", True) == [ 164 | "return=representation", 165 | "count=exact", 166 | ] 167 | assert builder.http_method == "DELETE" 168 | assert builder.json == {} 169 | 170 | 171 | class TestTextSearch: 172 | def test_text_search(self, request_builder: AsyncRequestBuilder): 173 | builder = request_builder.select("catchphrase").text_search( 174 | "catchphrase", 175 | "'fat' & 'cat'", 176 | { 177 | "type": "plain", 178 | "config": "english", 179 | }, 180 | ) 181 | assert "catchphrase=plfts%28english%29.%27fat%27+%26+%27cat%27" in str( 182 | builder.params 183 | ) 184 | 185 | 186 | class TestExplain: 187 | def test_explain_plain(self, request_builder: AsyncRequestBuilder): 188 | builder = request_builder.select("*").explain() 189 | assert builder.params["select"] == "*" 190 | assert "application/vnd.pgrst.plan" in str(builder.headers.get("accept")) 191 | 192 | def test_explain_options(self, request_builder: AsyncRequestBuilder): 193 | builder = request_builder.select("*").explain( 194 | format="json", analyze=True, verbose=True, buffers=True, wal=True 195 | ) 196 | assert builder.params["select"] == "*" 197 | assert "application/vnd.pgrst.plan+json;" in str(builder.headers.get("accept")) 198 | assert 
"options=analyze|verbose|buffers|wal" in str(builder.headers.get("accept")) 199 | 200 | 201 | class TestOrder: 202 | def test_order(self, request_builder: AsyncRequestBuilder): 203 | builder = request_builder.select().order("country_name", desc=True) 204 | assert str(builder.params) == "select=%2A&order=country_name.desc" 205 | 206 | def test_multiple_orders(self, request_builder: AsyncRequestBuilder): 207 | builder = ( 208 | request_builder.select() 209 | .order("country_name", desc=True) 210 | .order("iso", desc=True) 211 | ) 212 | assert str(builder.params) == "select=%2A&order=country_name.desc%2Ciso.desc" 213 | 214 | def test_multiple_orders_on_foreign_table(self, request_builder: AsyncRequestBuilder): 215 | foreign_table = "cities" 216 | builder = ( 217 | request_builder.select() 218 | .order("city_name", desc=True, foreign_table=foreign_table) 219 | .order("id", desc=True, foreign_table=foreign_table) 220 | ) 221 | assert str(builder.params) == "select=%2A&cities.order=city_name.desc%2Cid.desc" 222 | 223 | 224 | class TestRange: 225 | def test_range_on_own_table(self, request_builder: AsyncRequestBuilder): 226 | builder = request_builder.select("*").range(0, 1) 227 | assert builder.params["select"] == "*" 228 | assert builder.params["limit"] == "2" 229 | assert builder.params["offset"] == "0" 230 | 231 | def test_range_on_foreign_table(self, request_builder: AsyncRequestBuilder): 232 | foreign_table = "cities" 233 | builder = request_builder.select("*").range(1, 2, foreign_table) 234 | assert builder.params["select"] == "*" 235 | assert builder.params[f"{foreign_table}.limit"] == "2" 236 | assert builder.params[f"{foreign_table}.offset"] == "1" 237 | 238 | 239 | @pytest.fixture 240 | def csv_api_response() -> str: 241 | return "id,name\n1,foo\n" 242 | 243 | 244 | @pytest.fixture 245 | def api_response_with_error() -> Dict[str, Any]: 246 | return { 247 | "message": "Route GET:/countries?select=%2A not found", 248 | "error": "Not Found", 249 | "statusCode": 
404, 250 | } 251 | 252 | 253 | @pytest.fixture 254 | def api_response() -> List[Dict[str, Any]]: 255 | return [ 256 | { 257 | "id": 1, 258 | "name": "Bonaire, Sint Eustatius and Saba", 259 | "iso2": "BQ", 260 | "iso3": "BES", 261 | "local_name": None, 262 | "continent": None, 263 | }, 264 | { 265 | "id": 2, 266 | "name": "Curaçao", 267 | "iso2": "CW", 268 | "iso3": "CUW", 269 | "local_name": None, 270 | "continent": None, 271 | }, 272 | ] 273 | 274 | 275 | @pytest.fixture 276 | def single_api_response() -> Dict[str, Any]: 277 | return { 278 | "id": 1, 279 | "name": "Bonaire, Sint Eustatius and Saba", 280 | "iso2": "BQ", 281 | "iso3": "BES", 282 | "local_name": None, 283 | "continent": None, 284 | } 285 | 286 | 287 | @pytest.fixture 288 | def content_range_header_with_count() -> str: 289 | return "0-1/2" 290 | 291 | 292 | @pytest.fixture 293 | def content_range_header_without_count() -> str: 294 | return "0-1" 295 | 296 | 297 | @pytest.fixture 298 | def prefer_header_with_count() -> str: 299 | return "count=exact" 300 | 301 | 302 | @pytest.fixture 303 | def prefer_header_without_count() -> str: 304 | return "random prefer header" 305 | 306 | 307 | @pytest.fixture 308 | def request_response_without_prefer_header() -> Response: 309 | return Response( 310 | status_code=200, request=Request(method="GET", url="http://example.com") 311 | ) 312 | 313 | 314 | @pytest.fixture 315 | def request_response_with_prefer_header_without_count( 316 | prefer_header_without_count: str, 317 | ) -> Response: 318 | return Response( 319 | status_code=200, 320 | request=Request( 321 | method="GET", 322 | url="http://example.com", 323 | headers={"prefer": prefer_header_without_count}, 324 | ), 325 | ) 326 | 327 | 328 | @pytest.fixture 329 | def request_response_with_prefer_header_with_count_and_content_range( 330 | prefer_header_with_count: str, content_range_header_with_count: str 331 | ) -> Response: 332 | return Response( 333 | status_code=200, 334 | headers={"content-range": 
content_range_header_with_count}, 335 | request=Request( 336 | method="GET", 337 | url="http://example.com", 338 | headers={"prefer": prefer_header_with_count}, 339 | ), 340 | ) 341 | 342 | 343 | @pytest.fixture 344 | def request_response_with_data( 345 | prefer_header_with_count: str, 346 | content_range_header_with_count: str, 347 | api_response: List[Dict[str, Any]], 348 | ) -> Response: 349 | return Response( 350 | status_code=200, 351 | headers={"content-range": content_range_header_with_count}, 352 | json=api_response, 353 | request=Request( 354 | method="GET", 355 | url="http://example.com", 356 | headers={"prefer": prefer_header_with_count}, 357 | ), 358 | ) 359 | 360 | 361 | @pytest.fixture 362 | def request_response_with_single_data( 363 | prefer_header_with_count: str, 364 | content_range_header_with_count: str, 365 | single_api_response: Dict[str, Any], 366 | ) -> Response: 367 | return Response( 368 | status_code=200, 369 | headers={"content-range": content_range_header_with_count}, 370 | json=single_api_response, 371 | request=Request( 372 | method="GET", 373 | url="http://example.com", 374 | headers={"prefer": prefer_header_with_count}, 375 | ), 376 | ) 377 | 378 | 379 | @pytest.fixture 380 | def request_response_with_csv_data(csv_api_response: str) -> Response: 381 | return Response( 382 | status_code=200, 383 | text=csv_api_response, 384 | request=Request(method="GET", url="http://example.com"), 385 | ) 386 | 387 | 388 | class TestApiResponse: 389 | def test_response_raises_when_api_error( 390 | self, api_response_with_error: Dict[str, Any] 391 | ): 392 | with pytest.raises(ValueError): 393 | APIResponse(data=api_response_with_error) 394 | 395 | def test_parses_valid_response_only_data(self, api_response: List[Dict[str, Any]]): 396 | result = APIResponse(data=api_response) 397 | assert result.data == api_response 398 | 399 | def test_parses_valid_response_data_and_count( 400 | self, api_response: List[Dict[str, Any]] 401 | ): 402 | count = 
len(api_response) 403 | result = APIResponse(data=api_response, count=count) 404 | assert result.data == api_response 405 | assert result.count == count 406 | 407 | def test_get_count_from_content_range_header_with_count( 408 | self, content_range_header_with_count: str 409 | ): 410 | assert ( 411 | APIResponse._get_count_from_content_range_header( 412 | content_range_header_with_count 413 | ) 414 | == 2 415 | ) 416 | 417 | def test_get_count_from_content_range_header_without_count( 418 | self, content_range_header_without_count: str 419 | ): 420 | assert ( 421 | APIResponse._get_count_from_content_range_header( 422 | content_range_header_without_count 423 | ) 424 | is None 425 | ) 426 | 427 | def test_is_count_in_prefer_header_true(self, prefer_header_with_count: str): 428 | assert APIResponse._is_count_in_prefer_header(prefer_header_with_count) 429 | 430 | def test_is_count_in_prefer_header_false(self, prefer_header_without_count: str): 431 | assert not APIResponse._is_count_in_prefer_header(prefer_header_without_count) 432 | 433 | def test_get_count_from_http_request_response_without_prefer_header( 434 | self, request_response_without_prefer_header: Response 435 | ): 436 | assert ( 437 | APIResponse._get_count_from_http_request_response( 438 | request_response_without_prefer_header 439 | ) 440 | is None 441 | ) 442 | 443 | def test_get_count_from_http_request_response_with_prefer_header_without_count( 444 | self, request_response_with_prefer_header_without_count: Response 445 | ): 446 | assert ( 447 | APIResponse._get_count_from_http_request_response( 448 | request_response_with_prefer_header_without_count 449 | ) 450 | is None 451 | ) 452 | 453 | def test_get_count_from_http_request_response_with_count_and_content_range( 454 | self, request_response_with_prefer_header_with_count_and_content_range: Response 455 | ): 456 | assert ( 457 | APIResponse._get_count_from_http_request_response( 458 | request_response_with_prefer_header_with_count_and_content_range 459 | 
) 460 | == 2 461 | ) 462 | 463 | def test_from_http_request_response_constructor( 464 | self, request_response_with_data: Response, api_response: List[Dict[str, Any]] 465 | ): 466 | result = APIResponse.from_http_request_response(request_response_with_data) 467 | assert result.data == api_response 468 | assert result.count == 2 469 | 470 | def test_single_from_http_request_response_constructor( 471 | self, 472 | request_response_with_single_data: Response, 473 | single_api_response: Dict[str, Any], 474 | ): 475 | result = SingleAPIResponse.from_http_request_response( 476 | request_response_with_single_data 477 | ) 478 | assert isinstance(result.data, dict) 479 | assert result.data == single_api_response 480 | assert result.count == 2 481 | 482 | def test_single_with_csv_data( 483 | self, request_response_with_csv_data: Response, csv_api_response: str 484 | ): 485 | result = SingleAPIResponse.from_http_request_response( 486 | request_response_with_csv_data 487 | ) 488 | assert isinstance(result.data, str) 489 | assert result.data == csv_api_response 490 | -------------------------------------------------------------------------------- /tests/_sync/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/supabase/postgrest-py/3547d62831b1d3a04adf3a2f80ba991840e5270d/tests/_sync/__init__.py -------------------------------------------------------------------------------- /tests/_sync/client.py: -------------------------------------------------------------------------------- 1 | from postgrest import SyncPostgrestClient 2 | 3 | REST_URL = "http://127.0.0.1:3000" 4 | 5 | 6 | def rest_client(): 7 | return SyncPostgrestClient( 8 | base_url=REST_URL, 9 | ) 10 | -------------------------------------------------------------------------------- /tests/_sync/test_client.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | import 
pytest 4 | from httpx import BasicAuth, Headers, Request, Response 5 | 6 | from postgrest import SyncPostgrestClient 7 | from postgrest.exceptions import APIError 8 | 9 | 10 | @pytest.fixture 11 | def postgrest_client(): 12 | with SyncPostgrestClient("https://example.com") as client: 13 | yield client 14 | 15 | 16 | class TestConstructor: 17 | def test_simple(self, postgrest_client: SyncPostgrestClient): 18 | session = postgrest_client.session 19 | 20 | assert session.base_url == "https://example.com" 21 | headers = Headers( 22 | { 23 | "Accept": "application/json", 24 | "Content-Type": "application/json", 25 | "Accept-Profile": "public", 26 | "Content-Profile": "public", 27 | } 28 | ) 29 | assert session.headers.items() >= headers.items() 30 | 31 | def test_custom_headers(self): 32 | with SyncPostgrestClient( 33 | "https://example.com", schema="pub", headers={"Custom-Header": "value"} 34 | ) as client: 35 | session = client.session 36 | 37 | assert session.base_url == "https://example.com" 38 | headers = Headers( 39 | { 40 | "Accept-Profile": "pub", 41 | "Content-Profile": "pub", 42 | "Custom-Header": "value", 43 | } 44 | ) 45 | assert session.headers.items() >= headers.items() 46 | 47 | 48 | class TestAuth: 49 | def test_auth_token(self, postgrest_client: SyncPostgrestClient): 50 | postgrest_client.auth("s3cr3t") 51 | session = postgrest_client.session 52 | 53 | assert session.headers["Authorization"] == "Bearer s3cr3t" 54 | 55 | def test_auth_basic(self, postgrest_client: SyncPostgrestClient): 56 | postgrest_client.auth(None, username="admin", password="s3cr3t") 57 | session = postgrest_client.session 58 | 59 | assert isinstance(session.auth, BasicAuth) 60 | assert session.auth._auth_header == BasicAuth("admin", "s3cr3t")._auth_header 61 | 62 | 63 | def test_schema(postgrest_client: SyncPostgrestClient): 64 | client = postgrest_client.schema("private") 65 | session = client.session 66 | subheaders = { 67 | "accept-profile": "private", 68 | "content-profile": 
"private", 69 | } 70 | 71 | assert subheaders.items() < dict(session.headers).items() 72 | 73 | 74 | def test_params_purged_after_execute(postgrest_client: SyncPostgrestClient): 75 | assert len(postgrest_client.session.params) == 0 76 | with pytest.raises(APIError): 77 | postgrest_client.from_("test").select("a", "b").eq("c", "d").execute() 78 | assert len(postgrest_client.session.params) == 0 79 | 80 | 81 | def test_response_status_code_outside_ok(postgrest_client: SyncPostgrestClient): 82 | with patch( 83 | "postgrest._sync.request_builder.SyncSelectRequestBuilder.execute", 84 | side_effect=APIError( 85 | { 86 | "message": "mock error", 87 | "code": "400", 88 | "hint": "mock", 89 | "details": "mock", 90 | "errors": [{"code": 400}], 91 | } 92 | ), 93 | ): 94 | with pytest.raises(APIError) as exc_info: 95 | postgrest_client.from_("test").select("a", "b").eq( 96 | "c", "d" 97 | ).execute() # gives status_code = 400 98 | exc_response = exc_info.value.json() 99 | assert not exc_response.get("success") 100 | assert isinstance(exc_response.get("errors"), list) 101 | assert ( 102 | isinstance(exc_response["errors"][0], dict) 103 | and "code" in exc_response["errors"][0] 104 | ) 105 | assert exc_response["errors"][0].get("code") == 400 106 | 107 | 108 | def test_response_maybe_single(postgrest_client: SyncPostgrestClient): 109 | with patch( 110 | "postgrest._sync.request_builder.SyncSingleRequestBuilder.execute", 111 | side_effect=APIError( 112 | {"message": "mock error", "code": "400", "hint": "mock", "details": "mock"} 113 | ), 114 | ): 115 | client = ( 116 | postgrest_client.from_("test").select("a", "b").eq("c", "d").maybe_single() 117 | ) 118 | assert "Accept" in client.headers 119 | assert client.headers.get("Accept") == "application/vnd.pgrst.object+json" 120 | with pytest.raises(APIError) as exc_info: 121 | client.execute() 122 | assert isinstance(exc_info, pytest.ExceptionInfo) 123 | exc_response = exc_info.value.json() 124 | assert 
isinstance(exc_response.get("message"), str) 125 | assert "code" in exc_response and int(exc_response["code"]) == 204 126 | 127 | 128 | # https://github.com/supabase/postgrest-py/issues/595 129 | 130 | 131 | def test_response_client_invalid_response_but_valid_json( 132 | postgrest_client: SyncPostgrestClient, 133 | ): 134 | with patch( 135 | "httpx._client.Client.request", 136 | return_value=Response( 137 | status_code=502, 138 | text='"gateway error: Error: Network connection lost."', # quotes makes this text a valid non-dict JSON object 139 | request=Request(method="GET", url="http://example.com"), 140 | ), 141 | ): 142 | client = postgrest_client.from_("test").select("a", "b").eq("c", "d").single() 143 | assert "Accept" in client.headers 144 | assert client.headers.get("Accept") == "application/vnd.pgrst.object+json" 145 | with pytest.raises(APIError) as exc_info: 146 | client.execute() 147 | assert isinstance(exc_info, pytest.ExceptionInfo) 148 | exc_response = exc_info.value.json() 149 | assert isinstance(exc_response.get("message"), str) 150 | assert exc_response.get("message") == "JSON could not be generated" 151 | assert "code" in exc_response and int(exc_response["code"]) == 502 152 | -------------------------------------------------------------------------------- /tests/_sync/test_filter_request_builder.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from httpx import Headers, QueryParams 3 | 4 | from postgrest import SyncFilterRequestBuilder 5 | from postgrest.utils import SyncClient 6 | 7 | 8 | @pytest.fixture 9 | def filter_request_builder(): 10 | with SyncClient() as client: 11 | yield SyncFilterRequestBuilder( 12 | client, "/example_table", "GET", Headers(), QueryParams(), {} 13 | ) 14 | 15 | 16 | def test_constructor(filter_request_builder: SyncFilterRequestBuilder): 17 | builder = filter_request_builder 18 | 19 | assert builder.path == "/example_table" 20 | assert len(builder.headers) == 0 
21 | assert len(builder.params) == 0 22 | assert builder.http_method == "GET" 23 | assert builder.json == None 24 | assert not builder.negate_next 25 | 26 | 27 | def test_not_(filter_request_builder): 28 | builder = filter_request_builder.not_ 29 | 30 | assert builder.negate_next 31 | 32 | 33 | def test_filter(filter_request_builder): 34 | builder = filter_request_builder.filter(":col.name", "eq", "val") 35 | 36 | assert builder.params['":col.name"'] == "eq.val" 37 | 38 | 39 | @pytest.mark.parametrize( 40 | "col_name, expected_query_prefix", 41 | [ 42 | ("col:name", "%22col%3Aname%22"), 43 | ("col.name", "col.name"), 44 | ], 45 | ) 46 | def test_filter_special_characters( 47 | filter_request_builder, col_name, expected_query_prefix 48 | ): 49 | builder = filter_request_builder.filter(col_name, "eq", "val") 50 | 51 | assert str(builder.params) == f"{expected_query_prefix}=eq.val" 52 | 53 | 54 | def test_multivalued_param(filter_request_builder): 55 | builder = filter_request_builder.lte("x", "a").gte("x", "b") 56 | 57 | assert str(builder.params) == "x=lte.a&x=gte.b" 58 | 59 | 60 | def test_match(filter_request_builder): 61 | builder = filter_request_builder.match({"id": "1", "done": "false"}) 62 | assert str(builder.params) == "id=eq.1&done=eq.false" 63 | 64 | 65 | def test_equals(filter_request_builder): 66 | builder = filter_request_builder.eq("x", "a") 67 | 68 | assert str(builder.params) == "x=eq.a" 69 | 70 | 71 | def test_not_equal(filter_request_builder): 72 | builder = filter_request_builder.neq("x", "a") 73 | 74 | assert str(builder.params) == "x=neq.a" 75 | 76 | 77 | def test_greater_than(filter_request_builder): 78 | builder = filter_request_builder.gt("x", "a") 79 | 80 | assert str(builder.params) == "x=gt.a" 81 | 82 | 83 | def test_greater_than_or_equals_to(filter_request_builder): 84 | builder = filter_request_builder.gte("x", "a") 85 | 86 | assert str(builder.params) == "x=gte.a" 87 | 88 | 89 | def test_contains(filter_request_builder): 90 | builder = 
filter_request_builder.contains("x", "a") 91 | 92 | assert str(builder.params) == "x=cs.a" 93 | 94 | 95 | def test_contains_dictionary(filter_request_builder): 96 | builder = filter_request_builder.contains("x", {"a": "b"}) 97 | 98 | # {"a":"b"} 99 | assert str(builder.params) == "x=cs.%7B%22a%22%3A+%22b%22%7D" 100 | 101 | 102 | def test_contains_any_item(filter_request_builder): 103 | builder = filter_request_builder.contains("x", ["a", "b"]) 104 | 105 | # {a,b} 106 | assert str(builder.params) == "x=cs.%7Ba%2Cb%7D" 107 | 108 | 109 | def test_contains_in_list(filter_request_builder): 110 | builder = filter_request_builder.contains("x", '[{"a": "b"}]') 111 | 112 | # [{"a":+"b"}] (the + represents the space) 113 | assert str(builder.params) == "x=cs.%5B%7B%22a%22%3A+%22b%22%7D%5D" 114 | 115 | 116 | def test_contained_by_mixed_items(filter_request_builder): 117 | builder = filter_request_builder.contained_by("x", ["a", '["b", "c"]']) 118 | 119 | # {a,["b",+"c"]} 120 | assert str(builder.params) == "x=cd.%7Ba%2C%5B%22b%22%2C+%22c%22%5D%7D" 121 | 122 | 123 | def test_range_greater_than(filter_request_builder): 124 | builder = filter_request_builder.range_gt( 125 | "x", ["2000-01-02 08:30", "2000-01-02 09:30"] 126 | ) 127 | 128 | # {a,["b",+"c"]} 129 | assert str(builder.params) == "x=sr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" 130 | 131 | 132 | def test_range_greater_than_or_equal_to(filter_request_builder): 133 | builder = filter_request_builder.range_gte( 134 | "x", ["2000-01-02 08:30", "2000-01-02 09:30"] 135 | ) 136 | 137 | # {a,["b",+"c"]} 138 | assert str(builder.params) == "x=nxl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" 139 | 140 | 141 | def test_range_less_than(filter_request_builder): 142 | builder = filter_request_builder.range_lt( 143 | "x", ["2000-01-02 08:30", "2000-01-02 09:30"] 144 | ) 145 | 146 | # {a,["b",+"c"]} 147 | assert str(builder.params) == "x=sl.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" 148 | 149 | 150 | def 
test_range_less_than_or_equal_to(filter_request_builder): 151 | builder = filter_request_builder.range_lte( 152 | "x", ["2000-01-02 08:30", "2000-01-02 09:30"] 153 | ) 154 | 155 | # {a,["b",+"c"]} 156 | assert str(builder.params) == "x=nxr.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" 157 | 158 | 159 | def test_range_adjacent(filter_request_builder): 160 | builder = filter_request_builder.range_adjacent( 161 | "x", ["2000-01-02 08:30", "2000-01-02 09:30"] 162 | ) 163 | 164 | # {a,["b",+"c"]} 165 | assert str(builder.params) == "x=adj.%282000-01-02+08%3A30%2C2000-01-02+09%3A30%29" 166 | 167 | 168 | def test_overlaps(filter_request_builder): 169 | builder = filter_request_builder.overlaps("x", ["is:closed", "severity:high"]) 170 | 171 | # {a,["b",+"c"]} 172 | assert str(builder.params) == "x=ov.%7Bis%3Aclosed%2Cseverity%3Ahigh%7D" 173 | 174 | 175 | def test_overlaps_with_timestamp_range(filter_request_builder): 176 | builder = filter_request_builder.overlaps("x", "[2000-01-01 12:45, 2000-01-01 13:15)") 177 | 178 | # {a,["b",+"c"]} 179 | assert str(builder.params) == "x=ov.%5B2000-01-01+12%3A45%2C+2000-01-01+13%3A15%29" 180 | 181 | 182 | def test_like(filter_request_builder): 183 | builder = filter_request_builder.like("x", "%a%") 184 | 185 | assert str(builder.params) == "x=like.%25a%25" 186 | 187 | 188 | def test_ilike(filter_request_builder): 189 | builder = filter_request_builder.ilike("x", "%a%") 190 | 191 | assert str(builder.params) == "x=ilike.%25a%25" 192 | 193 | 194 | def test_like_all_of(filter_request_builder): 195 | builder = filter_request_builder.like_all_of("x", "A*,*b") 196 | 197 | assert str(builder.params) == "x=like%28all%29.%7BA%2A%2C%2Ab%7D" 198 | 199 | 200 | def test_like_any_of(filter_request_builder): 201 | builder = filter_request_builder.like_any_of("x", "a*,*b") 202 | 203 | assert str(builder.params) == "x=like%28any%29.%7Ba%2A%2C%2Ab%7D" 204 | 205 | 206 | def test_ilike_all_of(filter_request_builder): 207 | builder = 
filter_request_builder.ilike_all_of("x", "A*,*b") 208 | 209 | assert str(builder.params) == "x=ilike%28all%29.%7BA%2A%2C%2Ab%7D" 210 | 211 | 212 | def test_ilike_any_of(filter_request_builder): 213 | builder = filter_request_builder.ilike_any_of("x", "A*,*b") 214 | 215 | assert str(builder.params) == "x=ilike%28any%29.%7BA%2A%2C%2Ab%7D" 216 | 217 | 218 | def test_is_(filter_request_builder): 219 | builder = filter_request_builder.is_("x", "a") 220 | 221 | assert str(builder.params) == "x=is.a" 222 | 223 | 224 | def test_in_(filter_request_builder): 225 | builder = filter_request_builder.in_("x", ["a", "b"]) 226 | 227 | assert str(builder.params) == "x=in.%28a%2Cb%29" 228 | 229 | 230 | def test_or_(filter_request_builder): 231 | builder = filter_request_builder.or_("x.eq.1") 232 | 233 | assert str(builder.params) == "or=%28x.eq.1%29" 234 | 235 | 236 | def test_or_in_contain(filter_request_builder): 237 | builder = filter_request_builder.or_("id.in.(5,6,7), arraycol.cs.{'a','b'}") 238 | 239 | assert ( 240 | str(builder.params) 241 | == "or=%28id.in.%285%2C6%2C7%29%2C+arraycol.cs.%7B%27a%27%2C%27b%27%7D%29" 242 | ) 243 | -------------------------------------------------------------------------------- /tests/_sync/test_filter_request_builder_integration.py: -------------------------------------------------------------------------------- 1 | from .client import rest_client 2 | 3 | 4 | def test_multivalued_param(): 5 | res = ( 6 | rest_client() 7 | .from_("countries") 8 | .select("country_name, iso", count="exact") 9 | .lte("numcode", 8) 10 | .gte("numcode", 4) 11 | .execute() 12 | ) 13 | 14 | assert res.count == 2 15 | assert res.data == [ 16 | {"country_name": "AFGHANISTAN", "iso": "AF"}, 17 | {"country_name": "ALBANIA", "iso": "AL"}, 18 | ] 19 | 20 | 21 | def test_match(): 22 | res = ( 23 | rest_client() 24 | .from_("countries") 25 | .select("country_name, iso") 26 | .match({"numcode": 8, "nicename": "Albania"}) 27 | .single() 28 | .execute() 29 | ) 30 | 31 | assert 
res.data == {"country_name": "ALBANIA", "iso": "AL"} 32 | 33 | 34 | def test_equals(): 35 | res = ( 36 | rest_client() 37 | .from_("countries") 38 | .select("country_name, iso") 39 | .eq("nicename", "Albania") 40 | .single() 41 | .execute() 42 | ) 43 | 44 | assert res.data == {"country_name": "ALBANIA", "iso": "AL"} 45 | 46 | 47 | def test_not_equal(): 48 | res = ( 49 | rest_client() 50 | .from_("users") 51 | .select("id, name") 52 | .neq("name", "Jane") 53 | .single() 54 | .execute() 55 | ) 56 | 57 | assert res.data == {"id": 1, "name": "Michael"} 58 | 59 | 60 | def test_greater_than(): 61 | res = rest_client().from_("users").select("id, name").gt("id", 1).single().execute() 62 | 63 | assert res.data == {"id": 2, "name": "Jane"} 64 | 65 | 66 | def test_greater_than_or_equals_to(): 67 | res = rest_client().from_("users").select("id, name").gte("id", 1).execute() 68 | 69 | assert res.data == [{"id": 1, "name": "Michael"}, {"id": 2, "name": "Jane"}] 70 | 71 | 72 | def test_contains_dictionary(): 73 | res = ( 74 | rest_client() 75 | .from_("users") 76 | .select("name") 77 | .contains("address", {"postcode": 90210}) 78 | .single() 79 | .execute() 80 | ) 81 | 82 | assert res.data == {"name": "Michael"} 83 | 84 | 85 | def test_contains_any_item(): 86 | res = ( 87 | rest_client() 88 | .from_("issues") 89 | .select("title") 90 | .contains("tags", ["is:open", "priority:low"]) 91 | .execute() 92 | ) 93 | 94 | assert res.data == [{"title": "Cache invalidation is not working"}] 95 | 96 | 97 | def test_contains_on_range(): 98 | res = ( 99 | rest_client() 100 | .from_("reservations") 101 | .select("id, room_name") 102 | .contains("during", "[2000-01-01 13:00, 2000-01-01 13:30)") 103 | .execute() 104 | ) 105 | 106 | assert res.data == [{"id": 1, "room_name": "Emerald"}] 107 | 108 | 109 | def test_contained_by_mixed_items(): 110 | res = ( 111 | rest_client() 112 | .from_("reservations") 113 | .select("id, room_name") 114 | .contained_by("during", "[2000-01-01 00:00, 2000-01-01 
23:59)") 115 | .execute() 116 | ) 117 | 118 | assert res.data == [{"id": 1, "room_name": "Emerald"}] 119 | 120 | 121 | def test_range_greater_than(): 122 | res = ( 123 | rest_client() 124 | .from_("reservations") 125 | .select("id, room_name") 126 | .range_gt("during", ["2000-01-02 08:00", "2000-01-02 09:00"]) 127 | .execute() 128 | ) 129 | 130 | assert res.data == [{"id": 2, "room_name": "Topaz"}] 131 | 132 | 133 | def test_range_greater_than_or_equal_to(): 134 | res = ( 135 | rest_client() 136 | .from_("reservations") 137 | .select("id, room_name") 138 | .range_gte("during", ["2000-01-02 08:30", "2000-01-02 09:30"]) 139 | .execute() 140 | ) 141 | 142 | assert res.data == [{"id": 2, "room_name": "Topaz"}] 143 | 144 | 145 | def test_range_less_than(): 146 | res = ( 147 | rest_client() 148 | .from_("reservations") 149 | .select("id, room_name") 150 | .range_lt("during", ["2000-01-01 15:00", "2000-01-02 16:00"]) 151 | .execute() 152 | ) 153 | 154 | assert res.data == [{"id": 1, "room_name": "Emerald"}] 155 | 156 | 157 | def test_range_less_than_or_equal_to(): 158 | res = ( 159 | rest_client() 160 | .from_("reservations") 161 | .select("id, room_name") 162 | .range_lte("during", ["2000-01-01 14:00", "2000-01-01 16:00"]) 163 | .execute() 164 | ) 165 | 166 | assert res.data == [{"id": 1, "room_name": "Emerald"}] 167 | 168 | 169 | def test_range_adjacent(): 170 | res = ( 171 | rest_client() 172 | .from_("reservations") 173 | .select("id, room_name") 174 | .range_adjacent("during", ["2000-01-01 12:00", "2000-01-01 13:00"]) 175 | .execute() 176 | ) 177 | 178 | assert res.data == [{"id": 1, "room_name": "Emerald"}] 179 | 180 | 181 | def test_overlaps(): 182 | res = ( 183 | rest_client() 184 | .from_("issues") 185 | .select("title") 186 | .overlaps("tags", ["is:closed", "severity:high"]) 187 | .execute() 188 | ) 189 | 190 | assert res.data == [ 191 | {"title": "Cache invalidation is not working"}, 192 | {"title": "Add alias to filters"}, 193 | ] 194 | 195 | 196 | def 
test_overlaps_with_timestamp_range(): 197 | res = ( 198 | rest_client() 199 | .from_("reservations") 200 | .select("room_name") 201 | .overlaps("during", "[2000-01-01 12:45, 2000-01-01 13:15)") 202 | .execute() 203 | ) 204 | 205 | assert res.data == [ 206 | {"room_name": "Emerald"}, 207 | ] 208 | 209 | 210 | def test_like(): 211 | res = ( 212 | rest_client() 213 | .from_("countries") 214 | .select("country_name, iso") 215 | .like("nicename", "%Alba%") 216 | .execute() 217 | ) 218 | 219 | assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] 220 | 221 | 222 | def test_ilike(): 223 | res = ( 224 | rest_client() 225 | .from_("countries") 226 | .select("country_name, iso") 227 | .ilike("nicename", "%alban%") 228 | .execute() 229 | ) 230 | 231 | assert res.data == [{"country_name": "ALBANIA", "iso": "AL"}] 232 | 233 | 234 | def test_like_all_of(): 235 | res = ( 236 | rest_client() 237 | .from_("countries") 238 | .select("nicename, iso") 239 | .like_all_of("nicename", "A*,*n") 240 | .execute() 241 | ) 242 | 243 | assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] 244 | 245 | 246 | def test_like_any_of(): 247 | res = ( 248 | rest_client() 249 | .from_("countries") 250 | .select("nicename, iso") 251 | .like_any_of("nicename", "Al*,*ia") 252 | .execute() 253 | ) 254 | 255 | assert res.data == [ 256 | {"iso": "AL", "nicename": "Albania"}, 257 | {"iso": "DZ", "nicename": "Algeria"}, 258 | ] 259 | 260 | 261 | def test_ilike_all_of(): 262 | res = ( 263 | rest_client() 264 | .from_("countries") 265 | .select("nicename, iso") 266 | .ilike_all_of("nicename", "a*,*n") 267 | .execute() 268 | ) 269 | 270 | assert res.data == [{"iso": "AF", "nicename": "Afghanistan"}] 271 | 272 | 273 | def test_ilike_any_of(): 274 | res = ( 275 | rest_client() 276 | .from_("countries") 277 | .select("nicename, iso") 278 | .ilike_any_of("nicename", "al*,*ia") 279 | .execute() 280 | ) 281 | 282 | assert res.data == [ 283 | {"iso": "AL", "nicename": "Albania"}, 284 | {"iso": "DZ", 
"nicename": "Algeria"}, 285 | ] 286 | 287 | 288 | def test_is_(): 289 | res = ( 290 | rest_client() 291 | .from_("countries") 292 | .select("country_name, iso") 293 | .is_("numcode", "null") 294 | .limit(1) 295 | .order("nicename") 296 | .execute() 297 | ) 298 | 299 | assert res.data == [{"country_name": "ANTARCTICA", "iso": "AQ"}] 300 | 301 | 302 | def test_is_not(): 303 | res = ( 304 | rest_client() 305 | .from_("countries") 306 | .select("country_name, iso") 307 | .not_.is_("numcode", "null") 308 | .limit(1) 309 | .order("nicename") 310 | .execute() 311 | ) 312 | 313 | assert res.data == [{"country_name": "AFGHANISTAN", "iso": "AF"}] 314 | 315 | 316 | def test_in_(): 317 | res = ( 318 | rest_client() 319 | .from_("countries") 320 | .select("country_name, iso") 321 | .in_("nicename", ["Albania", "Algeria"]) 322 | .execute() 323 | ) 324 | 325 | assert res.data == [ 326 | {"country_name": "ALBANIA", "iso": "AL"}, 327 | {"country_name": "ALGERIA", "iso": "DZ"}, 328 | ] 329 | 330 | 331 | def test_or_(): 332 | res = ( 333 | rest_client() 334 | .from_("countries") 335 | .select("country_name, iso") 336 | .or_("iso.eq.DZ,nicename.eq.Albania") 337 | .execute() 338 | ) 339 | 340 | assert res.data == [ 341 | {"country_name": "ALBANIA", "iso": "AL"}, 342 | {"country_name": "ALGERIA", "iso": "DZ"}, 343 | ] 344 | 345 | 346 | def test_or_with_and(): 347 | res = ( 348 | rest_client() 349 | .from_("countries") 350 | .select("country_name, iso") 351 | .or_("phonecode.gt.506,and(iso.eq.AL,nicename.eq.Albania)") 352 | .execute() 353 | ) 354 | 355 | assert res.data == [ 356 | {"country_name": "ALBANIA", "iso": "AL"}, 357 | {"country_name": "TRINIDAD AND TOBAGO", "iso": "TT"}, 358 | ] 359 | 360 | 361 | def test_or_in(): 362 | res = ( 363 | rest_client() 364 | .from_("issues") 365 | .select("id, title") 366 | .or_("id.in.(1,4),tags.cs.{is:open,priority:high}") 367 | .execute() 368 | ) 369 | 370 | assert res.data == [ 371 | {"id": 1, "title": "Cache invalidation is not working"}, 372 | 
{"id": 3, "title": "Add missing postgrest filters"}, 373 | {"id": 4, "title": "Add alias to filters"}, 374 | ] 375 | 376 | 377 | def test_or_on_reference_table(): 378 | res = ( 379 | rest_client() 380 | .from_("countries") 381 | .select("country_name, cities!inner(name)") 382 | .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") 383 | .execute() 384 | ) 385 | 386 | assert res.data == [ 387 | { 388 | "country_name": "UNITED KINGDOM", 389 | "cities": [ 390 | {"name": "London"}, 391 | {"name": "Manchester"}, 392 | {"name": "Liverpool"}, 393 | {"name": "Bristol"}, 394 | ], 395 | }, 396 | ] 397 | 398 | 399 | def test_explain_json(): 400 | res = ( 401 | rest_client() 402 | .from_("countries") 403 | .select("country_name, cities!inner(name)") 404 | .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") 405 | .explain(format="json", analyze=True) 406 | .execute() 407 | ) 408 | assert res.data[0]["Plan"]["Node Type"] == "Aggregate" 409 | 410 | 411 | def test_csv(): 412 | res = ( 413 | rest_client() 414 | .from_("countries") 415 | .select("country_name, iso") 416 | .in_("nicename", ["Albania", "Algeria"]) 417 | .csv() 418 | .execute() 419 | ) 420 | assert "ALBANIA,AL\nALGERIA,DZ" in res.data 421 | 422 | 423 | def test_explain_text(): 424 | res = ( 425 | rest_client() 426 | .from_("countries") 427 | .select("country_name, cities!inner(name)") 428 | .or_("country_id.eq.10,name.eq.Paris", reference_table="cities") 429 | .explain(analyze=True, verbose=True, settings=True, buffers=True, wal=True) 430 | .execute() 431 | ) 432 | assert ( 433 | "((cities_1.country_id = countries.id) AND ((cities_1.country_id = '10'::bigint) OR (cities_1.name = 'Paris'::text)))" 434 | in res 435 | ) 436 | 437 | 438 | def test_rpc_with_single(): 439 | res = ( 440 | rest_client() 441 | .rpc("list_stored_countries", {}) 442 | .select("nicename, country_name, iso") 443 | .eq("nicename", "Albania") 444 | .single() 445 | .execute() 446 | ) 447 | 448 | assert res.data == {"nicename": 
"Albania", "country_name": "ALBANIA", "iso": "AL"} 449 | 450 | 451 | def test_rpc_with_limit(): 452 | res = ( 453 | rest_client() 454 | .rpc("list_stored_countries", {}) 455 | .select("nicename, country_name, iso") 456 | .eq("nicename", "Albania") 457 | .limit(1) 458 | .execute() 459 | ) 460 | 461 | assert res.data == [{"nicename": "Albania", "country_name": "ALBANIA", "iso": "AL"}] 462 | 463 | 464 | def test_rpc_with_range(): 465 | res = ( 466 | rest_client() 467 | .rpc("list_stored_countries", {}) 468 | .select("nicename, iso") 469 | .range(1, 2) 470 | .execute() 471 | ) 472 | 473 | assert res.data == [ 474 | {"nicename": "Albania", "iso": "AL"}, 475 | {"nicename": "Algeria", "iso": "DZ"}, 476 | ] 477 | 478 | 479 | def test_rpc_post_with_args(): 480 | res = ( 481 | rest_client() 482 | .rpc("search_countries_by_name", {"search_name": "Alban"}) 483 | .select("nicename, iso") 484 | .execute() 485 | ) 486 | assert res.data == [{"nicename": "Albania", "iso": "AL"}] 487 | 488 | 489 | def test_rpc_get_with_args(): 490 | res = ( 491 | rest_client() 492 | .rpc("search_countries_by_name", {"search_name": "Alger"}, get=True) 493 | .select("nicename, iso") 494 | .execute() 495 | ) 496 | assert res.data == [{"nicename": "Algeria", "iso": "DZ"}] 497 | 498 | 499 | def test_rpc_get_with_count(): 500 | res = ( 501 | rest_client() 502 | .rpc("search_countries_by_name", {"search_name": "Al"}, get=True, count="exact") 503 | .select("nicename") 504 | .execute() 505 | ) 506 | assert res.count == 2 507 | assert res.data == [{"nicename": "Albania"}, {"nicename": "Algeria"}] 508 | 509 | 510 | def test_rpc_head_count(): 511 | res = ( 512 | rest_client() 513 | .rpc("search_countries_by_name", {"search_name": "Al"}, head=True, count="exact") 514 | .execute() 515 | ) 516 | 517 | assert res.count == 2 518 | assert res.data == [] 519 | 520 | 521 | def test_order(): 522 | res = ( 523 | rest_client() 524 | .from_("countries") 525 | .select("country_name, iso") 526 | .limit(3) 527 | 
.order("nicename", desc=True) 528 | .execute() 529 | ) 530 | 531 | assert res.data == [ 532 | {"country_name": "ZIMBABWE", "iso": "ZW"}, 533 | {"country_name": "UNITED STATES", "iso": "US"}, 534 | {"country_name": "UNITED KINGDOM", "iso": "GB"}, 535 | ] 536 | 537 | 538 | def test_order_on_foreign_table(): 539 | res = ( 540 | rest_client() 541 | .from_("orchestral_sections") 542 | .select("name, instruments(name)") 543 | .order("name", desc=True, foreign_table="instruments") 544 | .execute() 545 | ) 546 | 547 | assert res.data == [ 548 | {"name": "strings", "instruments": [{"name": "violin"}, {"name": "harp"}]}, 549 | {"name": "woodwinds", "instruments": []}, 550 | ] 551 | -------------------------------------------------------------------------------- /tests/_sync/test_query_request_builder.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from httpx import Headers, QueryParams 3 | 4 | from postgrest import SyncQueryRequestBuilder 5 | from postgrest.utils import SyncClient 6 | 7 | 8 | @pytest.fixture 9 | def query_request_builder(): 10 | with SyncClient() as client: 11 | yield SyncQueryRequestBuilder( 12 | client, "/example_table", "GET", Headers(), QueryParams(), {} 13 | ) 14 | 15 | 16 | def test_constructor(query_request_builder: SyncQueryRequestBuilder): 17 | builder = query_request_builder 18 | 19 | assert builder.path == "/example_table" 20 | assert len(builder.headers) == 0 21 | assert len(builder.params) == 0 22 | assert builder.http_method == "GET" 23 | assert builder.json == None 24 | -------------------------------------------------------------------------------- /tests/_sync/test_request_builder.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List 2 | 3 | import pytest 4 | from httpx import Request, Response 5 | 6 | from postgrest import SyncRequestBuilder, SyncSingleRequestBuilder 7 | from postgrest.base_request_builder import 
# --- tests/_sync/test_request_builder.py ---

from typing import Any, Dict, List

import pytest
from httpx import Request, Response

from postgrest import SyncRequestBuilder, SyncSingleRequestBuilder
from postgrest.base_request_builder import APIResponse, SingleAPIResponse
from postgrest.types import CountMethod
from postgrest.utils import SyncClient


@pytest.fixture
def request_builder():
    """Yield a SyncRequestBuilder for ``/example_table`` on a throwaway client."""
    with SyncClient() as client:
        yield SyncRequestBuilder(client, "/example_table")


def test_constructor(request_builder):
    assert request_builder.path == "/example_table"


class TestSelect:
    def test_select(self, request_builder: SyncRequestBuilder):
        # Column names are joined with commas; a plain select adds no Prefer header.
        builder = request_builder.select("col1", "col2")

        assert builder.params["select"] == "col1,col2"
        assert builder.headers.get("prefer") is None
        assert builder.http_method == "GET"
        # PEP 8 (E711): compare to None by identity, not equality.
        assert builder.json is None

    def test_select_with_count(self, request_builder: SyncRequestBuilder):
        # ``count=`` defaults the column list to ``*`` and advertises the count
        # method via the Prefer header.
        builder = request_builder.select(count=CountMethod.exact)

        assert builder.params["select"] == "*"
        assert builder.headers["prefer"] == "count=exact"
        assert builder.http_method == "GET"
        assert builder.json is None

    def test_select_with_head(self, request_builder: SyncRequestBuilder):
        # ``head=True`` switches the verb to HEAD (no response body requested).
        builder = request_builder.select("col1", "col2", head=True)

        assert builder.params.get("select") == "col1,col2"
        assert builder.headers.get("prefer") is None
        assert builder.http_method == "HEAD"
        assert builder.json is None

    def test_select_as_csv(self, request_builder: SyncRequestBuilder):
        # ``csv()`` sets the Accept header and narrows the builder to the
        # single-response variant.
        builder = request_builder.select("*").csv()

        assert builder.headers["Accept"] == "text/csv"
        assert isinstance(builder, SyncSingleRequestBuilder)


class TestInsert:
    def test_insert(self, request_builder: SyncRequestBuilder):
        # Inserts go out as POST with the row as the JSON body.
        builder = request_builder.insert({"key1": "val1"})

        assert builder.headers.get_list("prefer", True) == ["return=representation"]
        assert builder.http_method == "POST"
        assert builder.json == {"key1": "val1"}

    def test_insert_with_count(self, request_builder: SyncRequestBuilder):
        # ``count=`` appends the count directive to the existing Prefer values.
        builder = request_builder.insert({"key1": "val1"}, count=CountMethod.exact)

        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "count=exact",
        ]
        assert builder.http_method == "POST"
        assert builder.json == {"key1": "val1"}
    def test_insert_with_upsert(self, request_builder: SyncRequestBuilder):
        # ``upsert=True`` on insert adds the merge-duplicates resolution preference.
        builder = request_builder.insert({"key1": "val1"}, upsert=True)

        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "resolution=merge-duplicates",
        ]
        assert builder.http_method == "POST"
        assert builder.json == {"key1": "val1"}

    def test_upsert_with_default_single(self, request_builder: SyncRequestBuilder):
        # ``default_to_null=False`` advertises ``missing=default`` and pins the
        # ``columns`` query param to the (quoted) payload keys.
        builder = request_builder.upsert([{"key1": "val1"}], default_to_null=False)
        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "resolution=merge-duplicates",
            "missing=default",
        ]
        assert builder.http_method == "POST"
        assert builder.json == [{"key1": "val1"}]
        assert builder.params.get("columns") == '"key1"'

    def test_bulk_insert_using_default(self, request_builder: SyncRequestBuilder):
        # Bulk insert with heterogeneous rows: ``columns`` is the union of all
        # row keys; key order is unspecified, hence the set comparison.
        builder = request_builder.insert(
            [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False
        )
        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "missing=default",
        ]
        assert builder.http_method == "POST"
        assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}]
        assert set(builder.params["columns"].split(",")) == set(
            '"key1","key2","key3"'.split(",")
        )

    def test_upsert(self, request_builder: SyncRequestBuilder):
        # Plain upsert: POST plus the merge-duplicates resolution preference.
        builder = request_builder.upsert({"key1": "val1"})

        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "resolution=merge-duplicates",
        ]
        assert builder.http_method == "POST"
        assert builder.json == {"key1": "val1"}

    def test_bulk_upsert_with_default(self, request_builder: SyncRequestBuilder):
        # Bulk upsert with defaults: all three Prefer directives together.
        builder = request_builder.upsert(
            [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}], default_to_null=False
        )
        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "resolution=merge-duplicates",
            "missing=default",
        ]
        assert builder.http_method == "POST"
        assert builder.json == [{"key1": "val1", "key2": "val2"}, {"key3": "val3"}]
        assert set(builder.params["columns"].split(",")) == set(
            '"key1","key2","key3"'.split(",")
        )


class TestUpdate:
    def test_update(self, request_builder: SyncRequestBuilder):
        # Updates go out as PATCH with the changes as the JSON body.
        builder = request_builder.update({"key1": "val1"})

        assert builder.headers.get_list("prefer", True) == ["return=representation"]
        assert builder.http_method == "PATCH"
        assert builder.json == {"key1": "val1"}

    def test_update_with_count(self, request_builder: SyncRequestBuilder):
        builder = request_builder.update({"key1": "val1"}, count=CountMethod.exact)

        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "count=exact",
        ]
        assert builder.http_method == "PATCH"
        assert builder.json == {"key1": "val1"}


class TestDelete:
    def test_delete(self, request_builder: SyncRequestBuilder):
        # DELETE carries an empty JSON body.
        builder = request_builder.delete()

        assert builder.headers.get_list("prefer", True) == ["return=representation"]
        assert builder.http_method == "DELETE"
        assert builder.json == {}

    def test_delete_with_count(self, request_builder: SyncRequestBuilder):
        builder = request_builder.delete(count=CountMethod.exact)

        assert builder.headers.get_list("prefer", True) == [
            "return=representation",
            "count=exact",
        ]
        assert builder.http_method == "DELETE"
        assert builder.json == {}


class TestTextSearch:
    def test_text_search(self, request_builder: SyncRequestBuilder):
        # ``plfts`` = plain full-text search; the whole filter value ends up
        # URL-encoded in the query string.
        builder = request_builder.select("catchphrase").text_search(
            "catchphrase",
            "'fat' & 'cat'",
            {
                "type": "plain",
                "config": "english",
            },
        )
        assert "catchphrase=plfts%28english%29.%27fat%27+%26+%27cat%27" in str(
            builder.params
        )


class TestExplain:
    def test_explain_plain(self, request_builder: SyncRequestBuilder):
        # Bare explain() only swaps the Accept header to the pgrst plan media type.
        builder = request_builder.select("*").explain()
        assert builder.params["select"] == "*"
        assert "application/vnd.pgrst.plan" in str(builder.headers.get("accept"))

    def test_explain_options(self, request_builder: SyncRequestBuilder):
        # Explain options are serialized pipe-separated into the Accept header.
        builder = request_builder.select("*").explain(
            format="json", analyze=True, verbose=True, buffers=True, wal=True
        )
        assert builder.params["select"] == "*"
        assert "application/vnd.pgrst.plan+json;" in str(builder.headers.get("accept"))
        assert "options=analyze|verbose|buffers|wal" in str(builder.headers.get("accept"))


class TestOrder:
    def test_order(self, request_builder: SyncRequestBuilder):
        builder = request_builder.select().order("country_name", desc=True)
        assert str(builder.params) == "select=%2A&order=country_name.desc"

    def test_multiple_orders(self, request_builder: SyncRequestBuilder):
        # Successive order() calls append to the same ``order`` param
        # (comma is percent-encoded in the rendered query string).
        builder = (
            request_builder.select()
            .order("country_name", desc=True)
            .order("iso", desc=True)
        )
        assert str(builder.params) == "select=%2A&order=country_name.desc%2Ciso.desc"

    def test_multiple_orders_on_foreign_table(self, request_builder: SyncRequestBuilder):
        # Foreign-table ordering is namespaced as ``<table>.order``.
        foreign_table = "cities"
        builder = (
            request_builder.select()
            .order("city_name", desc=True, foreign_table=foreign_table)
            .order("id", desc=True, foreign_table=foreign_table)
        )
        assert str(builder.params) == "select=%2A&cities.order=city_name.desc%2Cid.desc"
.order("id", desc=True, foreign_table=foreign_table) 220 | ) 221 | assert str(builder.params) == "select=%2A&cities.order=city_name.desc%2Cid.desc" 222 | 223 | 224 | class TestRange: 225 | def test_range_on_own_table(self, request_builder: SyncRequestBuilder): 226 | builder = request_builder.select("*").range(0, 1) 227 | assert builder.params["select"] == "*" 228 | assert builder.params["limit"] == "2" 229 | assert builder.params["offset"] == "0" 230 | 231 | def test_range_on_foreign_table(self, request_builder: SyncRequestBuilder): 232 | foreign_table = "cities" 233 | builder = request_builder.select("*").range(1, 2, foreign_table) 234 | assert builder.params["select"] == "*" 235 | assert builder.params[f"{foreign_table}.limit"] == "2" 236 | assert builder.params[f"{foreign_table}.offset"] == "1" 237 | 238 | 239 | @pytest.fixture 240 | def csv_api_response() -> str: 241 | return "id,name\n1,foo\n" 242 | 243 | 244 | @pytest.fixture 245 | def api_response_with_error() -> Dict[str, Any]: 246 | return { 247 | "message": "Route GET:/countries?select=%2A not found", 248 | "error": "Not Found", 249 | "statusCode": 404, 250 | } 251 | 252 | 253 | @pytest.fixture 254 | def api_response() -> List[Dict[str, Any]]: 255 | return [ 256 | { 257 | "id": 1, 258 | "name": "Bonaire, Sint Eustatius and Saba", 259 | "iso2": "BQ", 260 | "iso3": "BES", 261 | "local_name": None, 262 | "continent": None, 263 | }, 264 | { 265 | "id": 2, 266 | "name": "Curaçao", 267 | "iso2": "CW", 268 | "iso3": "CUW", 269 | "local_name": None, 270 | "continent": None, 271 | }, 272 | ] 273 | 274 | 275 | @pytest.fixture 276 | def single_api_response() -> Dict[str, Any]: 277 | return { 278 | "id": 1, 279 | "name": "Bonaire, Sint Eustatius and Saba", 280 | "iso2": "BQ", 281 | "iso3": "BES", 282 | "local_name": None, 283 | "continent": None, 284 | } 285 | 286 | 287 | @pytest.fixture 288 | def content_range_header_with_count() -> str: 289 | return "0-1/2" 290 | 291 | 292 | @pytest.fixture 293 | def 
content_range_header_without_count() -> str: 294 | return "0-1" 295 | 296 | 297 | @pytest.fixture 298 | def prefer_header_with_count() -> str: 299 | return "count=exact" 300 | 301 | 302 | @pytest.fixture 303 | def prefer_header_without_count() -> str: 304 | return "random prefer header" 305 | 306 | 307 | @pytest.fixture 308 | def request_response_without_prefer_header() -> Response: 309 | return Response( 310 | status_code=200, request=Request(method="GET", url="http://example.com") 311 | ) 312 | 313 | 314 | @pytest.fixture 315 | def request_response_with_prefer_header_without_count( 316 | prefer_header_without_count: str, 317 | ) -> Response: 318 | return Response( 319 | status_code=200, 320 | request=Request( 321 | method="GET", 322 | url="http://example.com", 323 | headers={"prefer": prefer_header_without_count}, 324 | ), 325 | ) 326 | 327 | 328 | @pytest.fixture 329 | def request_response_with_prefer_header_with_count_and_content_range( 330 | prefer_header_with_count: str, content_range_header_with_count: str 331 | ) -> Response: 332 | return Response( 333 | status_code=200, 334 | headers={"content-range": content_range_header_with_count}, 335 | request=Request( 336 | method="GET", 337 | url="http://example.com", 338 | headers={"prefer": prefer_header_with_count}, 339 | ), 340 | ) 341 | 342 | 343 | @pytest.fixture 344 | def request_response_with_data( 345 | prefer_header_with_count: str, 346 | content_range_header_with_count: str, 347 | api_response: List[Dict[str, Any]], 348 | ) -> Response: 349 | return Response( 350 | status_code=200, 351 | headers={"content-range": content_range_header_with_count}, 352 | json=api_response, 353 | request=Request( 354 | method="GET", 355 | url="http://example.com", 356 | headers={"prefer": prefer_header_with_count}, 357 | ), 358 | ) 359 | 360 | 361 | @pytest.fixture 362 | def request_response_with_single_data( 363 | prefer_header_with_count: str, 364 | content_range_header_with_count: str, 365 | single_api_response: Dict[str, 
@pytest.fixture
def request_response_with_csv_data(csv_api_response: str) -> Response:
    # A 200 response whose body is CSV text rather than JSON.
    return Response(
        status_code=200,
        text=csv_api_response,
        request=Request(method="GET", url="http://example.com"),
    )


class TestApiResponse:
    # Unit tests for APIResponse / SingleAPIResponse construction and the
    # private count-extraction helpers.

    def test_response_raises_when_api_error(
        self, api_response_with_error: Dict[str, Any]
    ):
        # Building an APIResponse directly from an error payload must fail.
        with pytest.raises(ValueError):
            APIResponse(data=api_response_with_error)

    def test_parses_valid_response_only_data(self, api_response: List[Dict[str, Any]]):
        result = APIResponse(data=api_response)
        assert result.data == api_response

    def test_parses_valid_response_data_and_count(
        self, api_response: List[Dict[str, Any]]
    ):
        count = len(api_response)
        result = APIResponse(data=api_response, count=count)
        assert result.data == api_response
        assert result.count == count

    def test_get_count_from_content_range_header_with_count(
        self, content_range_header_with_count: str
    ):
        # "0-1/2" → the total after the slash is extracted as the count.
        assert (
            APIResponse._get_count_from_content_range_header(
                content_range_header_with_count
            )
            == 2
        )

    def test_get_count_from_content_range_header_without_count(
        self, content_range_header_without_count: str
    ):
        # "0-1" carries no total, so no count can be extracted.
        assert (
            APIResponse._get_count_from_content_range_header(
                content_range_header_without_count
            )
            is None
        )

    def test_is_count_in_prefer_header_true(self, prefer_header_with_count: str):
        assert APIResponse._is_count_in_prefer_header(prefer_header_with_count)

    def test_is_count_in_prefer_header_false(self, prefer_header_without_count: str):
        assert not APIResponse._is_count_in_prefer_header(prefer_header_without_count)

    def test_get_count_from_http_request_response_without_prefer_header(
        self, request_response_without_prefer_header: Response
    ):
        # No Prefer header on the originating request → no count.
        assert (
            APIResponse._get_count_from_http_request_response(
                request_response_without_prefer_header
            )
            is None
        )

    def test_get_count_from_http_request_response_with_prefer_header_without_count(
        self, request_response_with_prefer_header_without_count: Response
    ):
        # Prefer header present but without a count directive → still no count.
        assert (
            APIResponse._get_count_from_http_request_response(
                request_response_with_prefer_header_without_count
            )
            is None
        )

    def test_get_count_from_http_request_response_with_count_and_content_range(
        self, request_response_with_prefer_header_with_count_and_content_range: Response
    ):
        # Count requested via Prefer and present in Content-Range → extracted.
        assert (
            APIResponse._get_count_from_http_request_response(
                request_response_with_prefer_header_with_count_and_content_range
            )
            == 2
        )

    def test_from_http_request_response_constructor(
        self, request_response_with_data: Response, api_response: List[Dict[str, Any]]
    ):
        result = APIResponse.from_http_request_response(request_response_with_data)
        assert result.data == api_response
        assert result.count == 2

    def test_single_from_http_request_response_constructor(
        self,
        request_response_with_single_data: Response,
        single_api_response: Dict[str, Any],
    ):
        # The single variant keeps the payload as a dict instead of a list.
        result = SingleAPIResponse.from_http_request_response(
            request_response_with_single_data
        )
        assert isinstance(result.data, dict)
        assert result.data == single_api_response
        assert result.count == 2

    def test_single_with_csv_data(
        self, request_response_with_csv_data: Response, csv_api_response: str
    ):
        # CSV bodies are kept as raw text on ``data``.
        result = SingleAPIResponse.from_http_request_response(
            request_response_with_csv_data
        )
        assert isinstance(result.data, str)
        assert result.data == csv_api_response
# --- tests/test_utils.py ---

import pytest

from postgrest.utils import sanitize_param


@pytest.mark.parametrize(
    "value, expected",
    [
        # Values containing PostgREST reserved characters get double-quoted.
        # (The original list repeated the ("param,name", ...) case twice;
        # the duplicate has been removed.)
        ("param,name", '"param,name"'),
        ("param:name", '"param:name"'),
        ("param(name", '"param(name"'),
        ("param)name", '"param)name"'),
        # Plain identifiers (dots, underscores) pass through unchanged.
        ("table.column", "table.column"),
        ("table_column", "table_column"),
    ],
)
def test_sanitize_params(value, expected):
    """sanitize_param quotes values with reserved characters, else returns as-is."""
    assert sanitize_param(value) == expected