├── .github ├── CODEOWNERS ├── dependabot.yaml └── workflows │ ├── cd.yaml │ ├── ci.yaml │ ├── docs.yml │ ├── pr-title.yaml │ └── release.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── .sourcery.yaml ├── .vscode ├── launch.json └── settings.json ├── CODEOWNERS ├── CONTRIBUTING.rst ├── LICENSE ├── Makefile ├── README.md ├── docs ├── .gitignore ├── __init__.py ├── _static │ ├── .gitkeep │ ├── custom.css │ └── logo.svg ├── changelog.rst ├── conf.py ├── contribution-guide.rst ├── getting-started │ ├── basic-usage.rst │ ├── configuration.rst │ ├── enabling-plugins.rst │ ├── index.rst │ ├── installation.rst │ └── next-steps.rst ├── index.rst └── supported-databases │ ├── azure_blob_storage.rst │ ├── bigquery.rst │ ├── cockroachdb.rst │ ├── elasticsearch.rst │ ├── index.rst │ ├── mariadb.rst │ ├── minio.rst │ ├── mysql.rst │ ├── oracle.rst │ ├── postgres.rst │ ├── redis.rst │ ├── spanner.rst │ ├── sqlserver.rst │ └── valkey.rst ├── pyproject.toml ├── scripts ├── __init__.py ├── build_docs.py ├── convert_docs.sh └── install-hatch.sh ├── sonar-project.properties ├── src └── pytest_databases │ ├── __init__.py │ ├── __metadata__.py │ ├── _service.py │ ├── docker │ ├── __init__.py │ ├── azure_blob.py │ ├── bigquery.py │ ├── cockroachdb.py │ ├── elastic_search.py │ ├── mariadb.py │ ├── minio.py │ ├── mssql.py │ ├── mysql.py │ ├── oracle.py │ ├── postgres.py │ ├── redis.py │ ├── spanner.py │ └── valkey.py │ ├── helpers.py │ ├── py.typed │ └── types.py ├── tests ├── __init__.py ├── conftest.py ├── test_azure_blob.py ├── test_bigquery.py ├── test_cockroachdb.py ├── test_elasticsearch.py ├── test_mariadb.py ├── test_minio.py ├── test_mssql.py ├── test_mysql.py ├── test_oracle.py ├── test_postgres.py ├── test_redis.py ├── test_spanner.py └── test_valkey.py └── uv.lock /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owner settings for `litestar-org` 2 | 3 | # @maintainers should be assigned to all reviews 4 | 5 | # 
Most specific assignment takes precedence though, so if you add a more specific thing than the `*` glob, you must also add @maintainers 6 | 7 | # For more info about code owners see 8 | 9 | # Global Assignment 10 | 11 | * @litestar-org/maintainers @litestar-org/members 12 | -------------------------------------------------------------------------------- /.github/dependabot.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | -------------------------------------------------------------------------------- /.github/workflows/cd.yaml: -------------------------------------------------------------------------------- 1 | name: Continuous Deployment 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*.*.*" 7 | 8 | jobs: 9 | generate-changelog: 10 | name: Generate changelog 11 | runs-on: ubuntu-22.04 12 | outputs: 13 | release_body: ${{ steps.git-cliff.outputs.content }} 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v4 17 | with: 18 | fetch-depth: 0 19 | 20 | - name: Generate a changelog 21 | uses: orhun/git-cliff-action@main 22 | id: git-cliff 23 | with: 24 | config: pyproject.toml 25 | args: -vv --latest --strip header 26 | env: 27 | OUTPUT: docs/CHANGELOG.rst 28 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: Tests And Linting 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | concurrency: 10 | group: test-${{ github.head_ref }} 11 | cancel-in-progress: true 12 | 13 | env: 14 | PYTHONUNBUFFERED: "1" 15 | FORCE_COLOR: "1" 16 | 17 | jobs: 18 | test: 19 | name: Python ${{ matrix.python-version }} - ${{ matrix.cdist-group }}/3 20 | runs-on: ubuntu-latest 21 | timeout-minutes: 10 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | 
python-version: ["3.9", "3.10", "3.11", "3.12"] 26 | cdist-group: [1, 2, 3] 27 | 28 | steps: 29 | - uses: actions/checkout@v4 30 | 31 | - name: Install additional dependencies 32 | run: sudo apt-get update && sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 libmariadb-dev && sudo apt-get autoremove -y && sudo apt-get clean -y 33 | 34 | - name: Set up Python ${{ matrix.python-version }} 35 | uses: actions/setup-python@v5 36 | with: 37 | python-version: ${{ matrix.python-version }} 38 | 39 | - name: Install uv 40 | uses: astral-sh/setup-uv@v6 41 | 42 | - name: Install dependencies 43 | run: uv sync --frozen 44 | 45 | - if: matrix.python-version == '3.12' 46 | name: Run tests with coverage tracking 47 | run: uv run pytest --cdist-group=${{ matrix.cdist-group }}/3 -k "not elasticsearch" 48 | 49 | - if: matrix.python-version != '3.12' 50 | name: Run tests without tracking coverage 51 | run: uv run pytest --cdist-group=${{ matrix.cdist-group }}/3 -k "not elasticsearch" 52 | 53 | - if: matrix.python-version == '3.12' 54 | uses: actions/upload-artifact@v4 55 | with: 56 | name: coverage-xml 57 | path: coverage.xml 58 | 59 | - if: matrix.python-version == '3.12' 60 | name: Upload coverage reports to Codecov 61 | uses: codecov/codecov-action@v5.4.3 62 | with: 63 | token: ${{ secrets.CODECOV_TOKEN }} 64 | slug: litestar-org/pytest-databases 65 | 66 | # run elasticsearch in a separate step. 
it's too slow 67 | test_elasticsearch: 68 | runs-on: ubuntu-latest 69 | steps: 70 | - name: Install additional dependencies 71 | run: sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 libmariadb-dev 72 | 73 | - uses: actions/checkout@v4 74 | 75 | - name: Set up Python 76 | uses: actions/setup-python@v5 77 | with: 78 | python-version: 3.9 79 | 80 | - name: Install uv 81 | uses: astral-sh/setup-uv@v6 82 | 83 | - name: Install dependencies 84 | run: uv sync --frozen 85 | 86 | - name: Run tests with coverage tracking 87 | run: uv run pytest -k elasticsearch 88 | 89 | # sonar: 90 | # needs: 91 | # - test 92 | # - test_elasticsearch 93 | # if: github.event.pull_request.head.repo.fork == false && github.repository_owner == 'litestar-org' 94 | # runs-on: ubuntu-latest 95 | # steps: 96 | # - name: Check out repository 97 | # uses: actions/checkout@v4 98 | # with: 99 | # fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis 100 | # - name: Download Artifacts 101 | # uses: actions/download-artifact@v4 102 | # with: 103 | # name: coverage-xml 104 | 105 | # - name: Fix coverage file for sonarcloud 106 | # run: sed -i "s/home\/runner\/work\/pytest-databases\/pytest-databases/github\/workspace/g" coverage.xml 107 | 108 | # - name: SonarCloud Scan 109 | # uses: sonarsource/sonarcloud-github-action@master 110 | # env: 111 | # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 112 | # SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 113 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Documentation Building 2 | 3 | on: 4 | release: 5 | types: [published] 6 | push: 7 | branches: 8 | - main 9 | # Allows you to run this workflow manually from the Actions tab 10 | workflow_dispatch: 11 | 12 | jobs: 13 | build_and_deploy: 14 | permissions: 15 | contents: write 16 | pages: write 17 | id-token: write 18 | environment: 19 | 
name: github-pages 20 | url: ${{ steps.deployment.outputs.page_url }} 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: actions/checkout@v4 24 | 25 | - name: Install additional dependencies 26 | run: sudo apt-get update && sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 libmariadb-dev && sudo apt-get autoremove -y && sudo apt-get clean -y 27 | 28 | - name: Set up Python 3.12 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: 3.12 32 | 33 | - name: Install uv 34 | uses: astral-sh/setup-uv@v6 35 | 36 | - name: Set up Python 37 | run: uv python install 3.12 38 | 39 | - name: Install dependencies 40 | run: uv sync --all-extras --dev 41 | 42 | - name: Build Release Documentation 43 | run: uv run scripts/build_docs.py dist/docs 44 | 45 | - name: Upload artifact 46 | uses: actions/upload-pages-artifact@v3 47 | with: 48 | path: dist/docs/ 49 | 50 | - name: Deploy to GitHub Pages 51 | id: deployment 52 | uses: actions/deploy-pages@v4 53 | -------------------------------------------------------------------------------- /.github/workflows/pr-title.yaml: -------------------------------------------------------------------------------- 1 | name: "Lint PR Title" 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | permissions: 11 | pull-requests: read 12 | 13 | jobs: 14 | main: 15 | name: Validate PR title 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: amannn/action-semantic-pull-request@v5 19 | env: 20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 21 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Latest Release 2 | 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | publish-release: 10 | runs-on: ubuntu-latest 11 | permissions: 12 | id-token: write 13 | environment: release 14 | steps: 15 | - 
name: Install additional dependencies 16 | run: sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 libmariadb-dev 17 | 18 | - name: Check out repository 19 | uses: actions/checkout@v4 20 | 21 | - name: Install uv 22 | uses: astral-sh/setup-uv@v6 23 | 24 | - name: Set up Python 25 | run: uv python install 3.12 26 | 27 | - name: Install dependencies 28 | run: uv sync --all-extras 29 | 30 | - name: Build package 31 | run: uv build 32 | 33 | - name: Publish package distributions to PyPI 34 | uses: pypa/gh-action-pypi-publish@release/v1 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | !resources/lib 20 | !src/**/*/lib 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | .pdm-python 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | 62 | # Flask stuff: 63 | instance/ 64 | .webassets-cache 65 | 66 | # Scrapy stuff: 67 | .scrapy 68 | 69 | # Sphinx documentation 70 | docs/_build/ 71 | 72 | # PyBuilder 73 | target/ 74 | !src/server/dbma/transformer/schemas/*/sql/target 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 91 | #Pipfile.lock 92 | 93 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 94 | __pypackages__/ 95 | 96 | # celery beat schedule file 97 | celerybeat-schedule 98 | celerybeat.pid 99 | 100 | # SageMath parsed files 101 | *.sage.py 102 | 103 | # dotenv 104 | .env 105 | !.env.example 106 | env.bak/ 107 | service_account.json 108 | 109 | # virtualenv 110 | .venv 111 | venv/ 112 | ENV/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # IDE settings 130 | .vscode/* 131 | !.vscode/settings.json 132 | !.vscode/extensions.json 133 | !.vscode/launch.json 134 | .idea/ 135 | !.vscode/settings.json 136 | !.vscode/launch.json 137 | # Version file generated by hatch-vcs 138 | src/pytest_databases/_version.py 139 | 140 | .ruff_cache 141 | 142 | 143 | # App specifics 144 | .env 145 | *.log 146 | log/* 147 | *.csv 148 | tmp/* 149 | node_modules 150 | .astro 151 | .vite 152 | .npmrc 153 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: "^docs/conf.py" 2 | 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v5.0.0 6 | hooks: 7 | - id: trailing-whitespace 8 | - id: check-added-large-files 9 | - id: check-ast 10 | - id: check-json 11 | - id: check-merge-conflict 12 | - id: check-xml 13 | - id: check-yaml 14 | args: ["--unsafe"] # needed for !! 
tags in mkdocs.yml 15 | - id: debug-statements 16 | - id: end-of-file-fixer 17 | - id: mixed-line-ending 18 | args: ["--fix=auto"] # replace 'auto' with 'lf' to enforce Linux/Mac line endings or 'crlf' for Windows 19 | 20 | # Ruff replaces black, flake8, autoflake and isort 21 | - repo: https://github.com/charliermarsh/ruff-pre-commit 22 | rev: "v0.11.11" # make sure this is always consistent with hatch configs 23 | hooks: 24 | - id: ruff 25 | args: [--config, ./pyproject.toml] 26 | 27 | - repo: https://github.com/pre-commit/mirrors-prettier 28 | rev: v4.0.0-alpha.8 29 | hooks: 30 | - id: prettier 31 | exclude: templates|migrations|scripts|docs|dist|.venv|public 32 | -------------------------------------------------------------------------------- /.sourcery.yaml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - .tox/ 3 | - .venv/ 4 | - dist/ 5 | - docs/_build/ 6 | - docs/_static/ 7 | - node_modules/ 8 | - vendor/ 9 | - venv/ 10 | 11 | rule_settings: 12 | enable: [default] 13 | disable: [dont-import-test-modules] 14 | rule_types: 15 | - refactoring 16 | - suggestion 17 | - comment 18 | python_version: "3.8" 19 | 20 | rules: [] 21 | 22 | metrics: 23 | quality_threshold: 25.0 24 | 25 | github: 26 | ignore_labels: 27 | - sourcery-ignore 28 | - docs 29 | labels: 30 | - build-ignore 31 | request_review: 32 | origin: owner 33 | forked: author 34 | sourcery_branch: sourcery/{base_branch} 35 | 36 | clone_detection: 37 | min_lines: 3 38 | min_duplicates: 2 39 | identical_clones_only: false 40 | 41 | proxy: 42 | no_ssl_verify: false 43 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Python Debugger: Current File", 6 | "type": "debugpy", 7 | "request": "launch", 8 | "program": "${file}", 9 | "console": 
"integratedTerminal", 10 | "justMyCode": false 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/._*": true, 4 | "**/*.pyc": { 5 | "when": "$(basename).py" 6 | }, 7 | ".coverage.*": true, 8 | ".mypy_cache": true, 9 | "**/__pycache__": true, 10 | ".venv": true, 11 | ".direnv": true, 12 | ".idea": true, 13 | ".run": true, 14 | ".pytest_cache": true, 15 | ".cache": true, 16 | ".dist": true, 17 | "**/.pytest_cache": true, 18 | "site": true, 19 | ".angular": true, 20 | ".ruff_cache": true, 21 | ".astro": true, 22 | ".unasyncd_cache": true, 23 | ".coverage": true, 24 | "node_modules": true, 25 | ".terraform": true 26 | }, 27 | "mypy-type-checker.importStrategy": "fromEnvironment", 28 | "pylint.importStrategy": "fromEnvironment", 29 | "python.autoComplete.extraPaths": ["${workspaceFolder}/src"], 30 | "python.terminal.activateEnvInCurrentTerminal": true, 31 | "python.terminal.executeInFileDir": true, 32 | "python.testing.pytestEnabled": true, 33 | "autoDocstring.guessTypes": false, 34 | "python.analysis.autoImportCompletions": true, 35 | "python.analysis.autoFormatStrings": true, 36 | "editor.formatOnSave": true, 37 | "notebook.formatOnSave.enabled": true, 38 | "black-formatter.args": ["--line-length=120"], 39 | "evenBetterToml.formatter.reorderKeys": true, 40 | "evenBetterToml.formatter.trailingNewline": true, 41 | "evenBetterToml.formatter.columnWidth": 120, 42 | "evenBetterToml.formatter.arrayAutoCollapse": true, 43 | "python.globalModuleInstallation": false, 44 | "python.testing.unittestEnabled": false, 45 | "editor.codeActionsOnSave": { 46 | "source.fixAll.ruff": "explicit", 47 | "source.organizeImports.ruff": "explicit" 48 | }, 49 | "[python]": { 50 | "editor.formatOnSave": true, 51 | "editor.formatOnSaveMode": "file", 52 | "editor.insertSpaces": true, 53 | 
"editor.tabSize": 4, 54 | "editor.trimAutoWhitespace": true, 55 | "editor.defaultFormatter": "charliermarsh.ruff", 56 | "editor.codeActionsOnSave": { 57 | "source.fixAll": "explicit", 58 | "source.organizeImports": "explicit" 59 | } 60 | }, 61 | "python.analysis.fixAll": [ 62 | "source.unusedImports", 63 | "source.convertImportFormat" 64 | ], 65 | "sqltools.disableReleaseNotifications": true, 66 | "sqltools.disableNodeDetectNotifications": true, 67 | "cloudcode.duetAI.enable": true, 68 | "cloudcode.compute.sshInternalIp": true, 69 | "python.testing.pytestArgs": ["tests"], 70 | "markdownlint.run": "onSave", 71 | "markdownlint.config": { 72 | "default": true, 73 | "MD046": { 74 | "style": "fenced" 75 | }, 76 | "MD007": { 77 | "indent": 4 78 | }, 79 | "no-hard-tabs": false 80 | }, 81 | "yaml.schemas": { 82 | "https://squidfunk.github.io/mkdocs-material/schema.json": "mkdocs.yml" 83 | }, 84 | "yaml.customTags": [ 85 | "!ENV scalar", 86 | "!ENV sequence", 87 | "!relative scalar", 88 | "tag:yaml.org,2002:python/name:material.extensions.emoji.to_svg", 89 | "tag:yaml.org,2002:python/name:material.extensions.emoji.twemoji", 90 | "tag:yaml.org,2002:python/name:pymdownx.superfences.fence_code_format" 91 | ], 92 | "python.analysis.extraPaths": ["${workspaceFolder}/src"] 93 | } 94 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owner settings for `litestar-org` 2 | # @maintainers should be assigned to all reviews. 
3 | # Most specific assignment takes precedence though, so if you add a more specific thing than the `*` glob, you must also add @maintainers 4 | # For more info about code owners see https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-file-example 5 | 6 | # Global Assignment 7 | * @litestar-org/maintainers @litestar-org/members 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contribution guide 2 | ================== 3 | 4 | Setting up the environment 5 | -------------------------- 6 | 7 | 1. `Install MariaDB `_ 8 | 2. ``make install`` 9 | 10 | Code contributions 11 | ------------------ 12 | 13 | Workflow 14 | ++++++++ 15 | 16 | 1. `Fork `_ the `pytest-databases repository `_ 17 | 2. Clone your fork locally with git 18 | 3. `Set up the environment <#setting-up-the-environment>`_ 19 | 4. Make your changes 20 | 5. (Optional) Run ``pre-commit run --all-files`` to run linters and formatters. This step is optional and will be executed 21 | automatically by git before you make a commit, but you may want to run it manually in order to apply fixes 22 | 6. Commit your changes to git 23 | 7. Push the changes to your fork 24 | 8. Open a `pull request `_. Give the pull request a descriptive title 25 | indicating what it changes. If it has a corresponding open issue, the issue number should be included in the title as 26 | well. For example a pull request that fixes issue ``bug: Increased stack size making it impossible to find needle #100`` 27 | could be titled ``fix(#100): Make needles easier to find by applying fire to haystack`` 28 | 29 | .. tip:: Pull requests and commits all need to follow the 30 | `Conventional Commit format `_ 31 | 32 | .. 
note:: To run the integration tests locally, you will need the `ODBC Driver for SQL Server `_, one option is using `unixODBC `_. 33 | 34 | Guidelines for writing code 35 | ---------------------------- 36 | 37 | - All code should be fully `typed `_. This is enforced via 38 | `mypy `_. 39 | - All code should be tested. This is enforced via `pytest `_. 40 | - All code should be properly formatted. This is enforced via `black `_ and `Ruff `_. 41 | 42 | Writing and running tests 43 | +++++++++++++++++++++++++ 44 | 45 | Coming soon. 46 | 47 | Project documentation 48 | --------------------- 49 | 50 | The documentation is located in the ``/docs`` directory and is `ReST `_ and 51 | `Sphinx `_. If you're unfamiliar with any of those, 52 | `ReStructuredText primer `_ and 53 | `Sphinx quickstart `_ are recommended reads. 54 | 55 | Running the docs locally 56 | ++++++++++++++++++++++++ 57 | 58 | You can serve the documentation with ``make serve-docs``, or build them with ``make docs``. 59 | 60 | Creating a new release 61 | ---------------------- 62 | 63 | 1. Increment the version in `pyproject.toml `_. 64 | .. note:: The version should follow `semantic versioning `_ and `PEP 440 `_. 65 | 2. `Draft a new release `_ on GitHub 66 | 67 | * Use ``vMAJOR.MINOR.PATCH`` (e.g. ``v1.2.3``) as both the tag and release title 68 | * Fill in the release description. You can use the "Generate release notes" function to get a draft for this 69 | 3. Commit your changes and push to ``main`` 70 | 4. Publish the release 71 | 5. Go to `Actions `_ and approve the release workflow 72 | 6. 
Check that the workflow runs successfully 73 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Litestar 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL := /bin/bash 2 | 3 | # ============================================================================= 4 | # Configuration and Environment Variables 5 | # ============================================================================= 6 | 7 | .DEFAULT_GOAL:=help 8 | .ONESHELL: 9 | .EXPORT_ALL_VARIABLES: 10 | MAKEFLAGS += --no-print-directory 11 | 12 | # ----------------------------------------------------------------------------- 13 | # Display Formatting and Colors 14 | # ----------------------------------------------------------------------------- 15 | BLUE := $(shell printf "\033[1;34m") 16 | GREEN := $(shell printf "\033[1;32m") 17 | RED := $(shell printf "\033[1;31m") 18 | YELLOW := $(shell printf "\033[1;33m") 19 | NC := $(shell printf "\033[0m") 20 | INFO := $(shell printf "$(BLUE)ℹ$(NC)") 21 | OK := $(shell printf "$(GREEN)✓$(NC)") 22 | WARN := $(shell printf "$(YELLOW)⚠$(NC)") 23 | ERROR := $(shell printf "$(RED)✖$(NC)") 24 | 25 | # ============================================================================= 26 | # Help and Documentation 27 | # ============================================================================= 28 | 29 | .PHONY: help 30 | help: ## Display this help text for Makefile 31 | @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z0-9_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) 32 | 33 | # ============================================================================= 34 | # Installation and Environment Setup 35 | # ============================================================================= 36 | 37 | .PHONY: install-uv 38 | install-uv: ## Install latest version of uv 39 | @echo "${INFO} Installing uv..." 
40 | @curl -LsSf https://astral.sh/uv/install.sh | sh >/dev/null 2>&1 41 | @uv tool install nodeenv >/dev/null 2>&1 42 | @echo "${OK} UV installed successfully" 43 | 44 | .PHONY: install 45 | install: destroy clean ## Install the project, dependencies, and pre-commit 46 | @echo "${INFO} Starting fresh installation..." 47 | @uv python pin 3.12 >/dev/null 2>&1 48 | @uv venv >/dev/null 2>&1 49 | @uv sync --all-extras --dev 50 | @echo "${OK} Installation complete! 🎉" 51 | 52 | .PHONY: destroy 53 | destroy: ## Destroy the virtual environment 54 | @echo "${INFO} Destroying virtual environment... 🗑️" 55 | @rm -rf .venv 56 | @echo "${OK} Virtual environment destroyed 🗑️" 57 | 58 | # ============================================================================= 59 | # Dependency Management 60 | # ============================================================================= 61 | 62 | .PHONY: upgrade 63 | upgrade: ## Upgrade all dependencies to latest stable versions 64 | @echo "${INFO} Updating all dependencies... 🔄" 65 | @uv lock --upgrade 66 | @echo "${OK} Dependencies updated 🔄" 67 | @uv run pre-commit autoupdate 68 | @echo "${OK} Updated Pre-commit hooks 🔄" 69 | 70 | .PHONY: lock 71 | lock: ## Rebuild lockfiles from scratch 72 | @echo "${INFO} Rebuilding lockfiles... 🔄" 73 | @uv lock --upgrade >/dev/null 2>&1 74 | @echo "${OK} Lockfiles updated" 75 | 76 | # ============================================================================= 77 | # Build and Release 78 | # ============================================================================= 79 | 80 | .PHONY: build 81 | build: ## Build the package 82 | @echo "${INFO} Building package... 📦" 83 | @uv build >/dev/null 2>&1 84 | @echo "${OK} Package build complete" 85 | 86 | .PHONY: release 87 | release: ## Bump version and create release tag 88 | @echo "${INFO} Preparing for release... 
📦" 89 | @make docs 90 | @make clean 91 | @make build 92 | @uv lock --upgrade-package pytest-databases 93 | @uv run bump-my-version bump $(bump) 94 | @echo "${OK} Release complete 🎉" 95 | 96 | # ============================================================================= 97 | # Cleaning and Maintenance 98 | # ============================================================================= 99 | 100 | .PHONY: clean 101 | clean: ## Cleanup temporary build artifacts 102 | @echo "${INFO} Cleaning working directory... 🧹" 103 | @rm -rf .pytest_cache .ruff_cache .hypothesis build/ dist/ .eggs/ .coverage coverage.xml coverage.json htmlcov/ .pytest_cache tests/.pytest_cache tests/**/.pytest_cache .mypy_cache .unasyncd_cache/ .auto_pytabs_cache node_modules >/dev/null 2>&1 104 | @find . -name '*.egg-info' -exec rm -rf {} + >/dev/null 2>&1 105 | @find . -type f -name '*.egg' -exec rm -f {} + >/dev/null 2>&1 106 | @find . -name '*.pyc' -exec rm -f {} + >/dev/null 2>&1 107 | @find . -name '*.pyo' -exec rm -f {} + >/dev/null 2>&1 108 | @find . -name '*~' -exec rm -f {} + >/dev/null 2>&1 109 | @find . -name '__pycache__' -exec rm -rf {} + >/dev/null 2>&1 110 | @find . -name '.ipynb_checkpoints' -exec rm -rf {} + >/dev/null 2>&1 111 | @echo "${OK} Working directory cleaned" 112 | $(MAKE) docs-clean 113 | 114 | # ============================================================================= 115 | # Testing and Quality Checks 116 | # ============================================================================= 117 | 118 | .PHONY: test 119 | test: ## Run the tests 120 | @echo "${INFO} Running test cases... 🧪" 121 | @uv run pytest --quiet --runpytest subprocess 122 | @echo "${OK} Tests passed ✨" 123 | 124 | .PHONY: coverage 125 | coverage: ## Run tests with coverage report 126 | @echo "${INFO} Running tests with coverage... 
📊" 127 | @uv run pytest --cov -n auto --quiet 128 | @uv run coverage html >/dev/null 2>&1 129 | @uv run coverage xml >/dev/null 2>&1 130 | @echo "${OK} Coverage report generated ✨" 131 | 132 | # ----------------------------------------------------------------------------- 133 | # Type Checking 134 | # ----------------------------------------------------------------------------- 135 | 136 | .PHONY: mypy 137 | mypy: ## Run mypy 138 | @echo "${INFO} Running mypy... 🔍" 139 | @uv run dmypy run 140 | @echo "${OK} Mypy checks passed ✨" 141 | 142 | .PHONY: mypy-nocache 143 | mypy-nocache: ## Run Mypy without cache 144 | @echo "${INFO} Running mypy without cache... 🔍" 145 | @uv run mypy 146 | @echo "${OK} Mypy checks passed ✨" 147 | 148 | .PHONY: pyright 149 | pyright: ## Run pyright 150 | @echo "${INFO} Running pyright... 🔍" 151 | @uv run pyright 152 | @echo "${OK} Pyright checks passed ✨" 153 | 154 | .PHONY: type-check 155 | type-check: mypy pyright ## Run all type checking 156 | 157 | # ----------------------------------------------------------------------------- 158 | # Linting and Formatting 159 | # ----------------------------------------------------------------------------- 160 | 161 | .PHONY: pre-commit 162 | pre-commit: ## Run pre-commit hooks 163 | @echo "${INFO} Running pre-commit checks... 🔎" 164 | @NODE_OPTIONS="--no-deprecation --disable-warning=ExperimentalWarning" uv run pre-commit run --color=always --all-files 165 | @echo "${OK} Pre-commit checks passed ✨" 166 | 167 | .PHONY: slotscheck 168 | slotscheck: ## Run slotscheck 169 | @echo "${INFO} Running slots check... 🔍" 170 | @uv run slotscheck src/pytest_databases 171 | @echo "${OK} Slots check passed ✨" 172 | 173 | .PHONY: fix 174 | fix: ## Run code formatters 175 | @echo "${INFO} Running code formatters... 
🔧" 176 | @uv run ruff check --fix --unsafe-fixes 177 | @echo "${OK} Code formatting complete ✨" 178 | 179 | .PHONY: lint 180 | lint: pre-commit type-check slotscheck ## Run all linting checks 181 | 182 | .PHONY: check-all 183 | check-all: lint test coverage ## Run all checks (lint, test, coverage) 184 | 185 | # ============================================================================= 186 | # Documentation 187 | # ============================================================================= 188 | 189 | .PHONY: docs-clean 190 | docs-clean: ## Clean documentation build 191 | @echo "${INFO} Cleaning documentation build assets... 🧹" 192 | @rm -rf docs/_build >/dev/null 2>&1 193 | @echo "${OK} Documentation assets cleaned" 194 | 195 | .PHONY: docs-serve 196 | docs-serve: ## Serve documentation locally 197 | @echo "${INFO} Starting documentation server... 📚" 198 | @uv run sphinx-autobuild docs docs/_build/ -j auto --watch src/pytest_databases --watch docs --watch tests --watch CONTRIBUTING.rst --open-browser 199 | 200 | .PHONY: docs 201 | docs: docs-clean ## Build documentation 202 | @echo "${INFO} Building documentation... 📝" 203 | @uv run sphinx-build -M html docs docs/_build/ -E -a -j auto -W --keep-going 204 | @echo "${OK} Documentation built successfully" 205 | 206 | .PHONY: docs-linkcheck 207 | docs-linkcheck: ## Check documentation links 208 | @echo "${INFO} Checking documentation links... 🔗" 209 | @uv run sphinx-build -b linkcheck ./docs ./docs/_build -D linkcheck_ignore='http://.*','https://.*' 210 | @echo "${OK} Link check complete" 211 | 212 | .PHONY: docs-linkcheck-full 213 | docs-linkcheck-full: ## Run full documentation link check 214 | @echo "${INFO} Running full link check... 
🔗" 215 | @uv run sphinx-build -b linkcheck ./docs ./docs/_build -D linkcheck_anchors=0 216 | @echo "${OK} Full link check complete" 217 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | 5 | | Project | | Status | 6 | | --------- | :-- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 7 | | CI/CD | | [![Latest Release](https://github.com/litestar-org/pytest-databases/actions/workflows/release.yaml/badge.svg)](https://github.com/litestar-org/pytest-databases/actions/workflows/release.yaml) [![ci](https://github.com/litestar-org/pytest-databases/actions/workflows/ci.yaml/badge.svg)](https://github.com/litestar-org/pytest-databases/actions/workflows/ci.yaml) [![Documentation Building](https://github.com/litestar-org/pytest-databases/actions/workflows/docs.yml/badge.svg?branch=main)](https://github.com/litestar-org/pytest-databases/actions/workflows/docs.yml) | 8 | | Quality | | [![Coverage](https://codecov.io/github/litestar-org/pytest-databases/graph/badge.svg?token=vKez4Pycrc)](https://codecov.io/github/litestar-org/pytest-databases) [![Quality Gate 
Status](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_pytest-databases&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=litestar-org_pytest-databases) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_pytest-databases&metric=sqale_rating)](https://sonarcloud.io/summary/new_code?id=litestar-org_pytest-databases) [![Reliability Rating](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_pytest-databases&metric=reliability_rating)](https://sonarcloud.io/summary/new_code?id=litestar-org_pytest-databases) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=litestar-org_pytest-databases&metric=security_rating)](https://sonarcloud.io/summary/new_code?id=litestar-org_pytest-databases) | 9 | | Package | | [![PyPI - Version](https://img.shields.io/pypi/v/pytest-databases?labelColor=202235&color=edb641&logo=python&logoColor=edb641)](https://badge.fury.io/py/pytest-databases) ![PyPI - Support Python Versions](https://img.shields.io/pypi/pyversions/pytest-databases?labelColor=202235&color=edb641&logo=python&logoColor=edb641) | 10 | | Community | | [![Discord](https://img.shields.io/discord/919193495116337154?labelColor=202235&color=edb641&label=chat%20on%20discord&logo=discord&logoColor=edb641)](https://discord.gg/litestar-919193495116337154) [![Matrix](https://img.shields.io/badge/chat%20on%20Matrix-bridged-202235?labelColor=202235&color=edb641&logo=matrix&logoColor=edb641)](https://matrix.to/#/#litestar:matrix.org) [![Medium](https://img.shields.io/badge/Medium-202235?labelColor=202235&color=edb641&logo=medium&logoColor=edb641)](https://blog.litestar.dev) [![Twitter](https://img.shields.io/twitter/follow/LitestarAPI?labelColor=202235&color=edb641&logo=twitter&logoColor=edb641&style=flat)](https://twitter.com/LitestarAPI) 
[![Blog](https://img.shields.io/badge/Blog-litestar.dev-202235?logo=blogger&labelColor=202235&color=edb641&logoColor=edb641)](https://blog.litestar.dev) | 11 | | Meta | | [![Litestar Project](https://img.shields.io/badge/Litestar%20Org-%E2%AD%90%20Litestar-202235.svg?logo=python&labelColor=202235&color=edb641&logoColor=edb641)](https://github.com/litestar-org/pytest-databases) [![types - Mypy](https://img.shields.io/badge/types-Mypy-202235.svg?logo=python&labelColor=202235&color=edb641&logoColor=edb641)](https://github.com/python/mypy) [![License - MIT](https://img.shields.io/badge/license-MIT-202235.svg?logo=python&labelColor=202235&color=edb641&logoColor=edb641)](https://spdx.org/licenses/) [![Litestar Sponsors](https://img.shields.io/badge/Sponsor-%E2%9D%A4-%23edb641.svg?&logo=github&logoColor=edb641&labelColor=202235)](https://github.com/sponsors/litestar-org) [![linting - Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json&labelColor=202235)](https://github.com/astral-sh/ruff) [![code style - Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/format.json&labelColor=202235)](https://github.com/psf/black) | 12 | 13 |
14 | 15 | # Pytest Databases 16 | 17 | Ready-made database fixtures for your pytest tests. 18 | 19 | ## Features 20 | 21 | - 🚀 Easy-to-use database fixtures 22 | - 🔄 Support for multiple database types 23 | - 🐳 Docker integration for isolated testing environments 24 | - ⚡ Fast and efficient test execution 25 | - 🔧 Highly configurable 26 | 27 | `pytest-databases` uses the Docker Python SDK to manage the startup and shutdown of database services in containers. The following databases are currently available: 28 | 29 | - **Postgres**: Version 12, 13, 14, 15, 16 and 17 are available 30 | - **MySQL**: Version 5.6, 5.7 and 8 are available 31 | - **Oracle**: Version 18c XE and 23c Free are available 32 | - **SQL Server**: Version 2022 is available 33 | - **Google AlloyDB Omni**: Simplified Omni installation for easy testing. 34 | - **Google Spanner**: The latest cloud emulator from Google is available 35 | - **Google BigQuery**: Unofficial BigQuery emulator 36 | - **CockroachDB**: Latest version is available 37 | - **Redis**: Latest version 38 | - **Valkey**: Latest version 39 | - **Dragonfly**: Latest version 40 | - **KeyDB**: Latest version 41 | - **Elasticsearch**: Version 7 and 8 are available 42 | - **Azure Blob Storage**: Via Azurite 43 | - **MinIO**: Latest version 44 | 45 | ## Installation 46 | 47 | Quick install for postgres: 48 | 49 | ```bash 50 | pip install pytest-databases[postgres] 51 | ``` 52 | 53 | ## Quick Start 54 | 55 | - Add to your pytest `conftest.py`: 56 | 57 | ```py 58 | pytest_plugins = ["pytest_databases.docker.postgres"] 59 | ``` 60 | 61 | - Use in your tests: 62 | 63 | ```python 64 | from pytest_databases.docker.postgres import PostgresService 65 | import psycopg 66 | 67 | def test_one(postgres_service: PostgresService) -> None: 68 | with psycopg.connect( 69 | f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}", 70 | autocommit=True, 71 | ) as conn: 72 | 
result = conn.execute("SELECT 1") 73 | assert result 74 | ``` 75 | 76 | ## Documentation 77 | 78 | Full documentation is available at [https://pytest-databases.readthedocs.io/](https://pytest-databases.readthedocs.io/) 79 | 80 | ## Contributing 81 | 82 | Contributions are welcome! Please read our [Contributing Guide](CONTRIBUTING.rst) for details on our code of conduct and the process for submitting pull requests. 83 | 84 | ## License 85 | 86 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. 87 | 88 | ## Acknowledgments 89 | 90 | - The Litestar Framework team 91 | - The pytest community 92 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | # Sphinx documentation 2 | _build/ 3 | .buildinfo 4 | .doctrees/ 5 | 6 | # Python 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # Environment 12 | .env 13 | .venv 14 | env/ 15 | venv/ 16 | ENV/ 17 | 18 | # IDE 19 | .idea/ 20 | .vscode/ 21 | *.swp 22 | *.swo 23 | 24 | # OS 25 | .DS_Store 26 | Thumbs.db 27 | -------------------------------------------------------------------------------- /docs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/litestar-org/pytest-databases/461f8982df55a5214068082ea72170c06513363d/docs/__init__.py -------------------------------------------------------------------------------- /docs/_static/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/litestar-org/pytest-databases/461f8982df55a5214068082ea72170c06513363d/docs/_static/.gitkeep -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | /* Theme color definitions */ 2 | :root { 3 | 
--brand-font-size-xl: 6rem; 4 | --brand-font-size-lg: 5rem; 5 | --brand-font-size-md: 4rem; 6 | --brand-font-size-sm: 2.5rem; 7 | --brand-font-size-xs: 1.8rem; 8 | --brand-font-size-xxs: 1.6rem; 9 | 10 | --brand-letter-spacing-xl: 0.25em; 11 | --brand-letter-spacing-lg: 0.2em; 12 | --brand-letter-spacing-md: 0.1em; 13 | --brand-letter-spacing-sm: 0.05em; 14 | --brand-letter-spacing-xs: 0.03em; 15 | } 16 | 17 | html.light { 18 | --sl-color-primary: #202235; 19 | --sl-color-secondary: #edb641; 20 | --sl-color-accent: #ffd480; 21 | --sl-color-text-1: var(--sl-color-primary); 22 | --sl-color-text-2: var(--sl-color-secondary); 23 | --sy-c-foot-background: #f0f0f0; 24 | --yue-c-text: #000; 25 | --brand-text-glow: 0 0 10px rgba(32, 34, 53, 0.3), 26 | 0 0 20px rgba(32, 34, 53, 0.2), 0 0 30px rgba(237, 182, 65, 0.1); 27 | } 28 | 29 | html.dark { 30 | --sl-color-text-1: var(--sl-color-secondary); 31 | --sy-c-foot-background: black; 32 | --yue-c-text: #fff; 33 | --brand-text-glow: 0 0 10px rgba(237, 182, 65, 0.4), 34 | 0 0 20px rgba(237, 182, 65, 0.3), 0 0 30px rgba(237, 182, 65, 0.2); 35 | } 36 | 37 | .title-with-logo { 38 | display: flex; 39 | align-items: center; 40 | justify-content: center; 41 | margin: 5rem auto 4rem; 42 | width: 100%; 43 | padding: 0 2rem; 44 | user-select: none; 45 | -webkit-user-select: none; 46 | -moz-user-select: none; 47 | -ms-user-select: none; 48 | } 49 | 50 | html[class] .title-with-logo .brand-text { 51 | font-family: var(--sl-font-sans); 52 | font-weight: 300; 53 | font-size: var(--brand-font-size-lg); 54 | letter-spacing: var(--brand-letter-spacing-xl); 55 | text-transform: uppercase; 56 | text-align: center; 57 | line-height: 1.4; 58 | max-width: 100%; 59 | word-break: break-word; 60 | word-wrap: break-word; 61 | overflow-wrap: break-word; 62 | hyphens: auto; 63 | -webkit-hyphens: auto; 64 | -ms-hyphens: auto; 65 | transition: color var(--sl-transition), text-shadow var(--sl-transition); 66 | } 67 | 68 | html.light .title-with-logo 
.brand-text { 69 | color: var(--sl-color-text-1); 70 | text-shadow: var(--brand-text-glow); 71 | } 72 | 73 | html.dark .title-with-logo .brand-text { 74 | color: var(--sl-color-text-2); 75 | text-shadow: var(--brand-text-glow); 76 | } 77 | 78 | /* Button container wrapping */ 79 | .buttons.wrap { 80 | display: flex; 81 | flex-wrap: wrap; 82 | gap: 0.5rem; 83 | } 84 | 85 | .buttons.wrap .btn-no-wrap { 86 | flex: 0 0 auto; 87 | } 88 | 89 | /* Large screens */ 90 | @media (min-width: 1200px) { 91 | html[class] .title-with-logo .brand-text { 92 | font-size: var(--brand-font-size-xl); 93 | } 94 | } 95 | 96 | /* Medium-small screens */ 97 | @media (max-width: 991px) { 98 | html[class] .title-with-logo .brand-text { 99 | font-size: var(--brand-font-size-md); 100 | letter-spacing: var(--brand-letter-spacing-lg); 101 | } 102 | } 103 | 104 | /* Small screens */ 105 | @media (max-width: 767px) { 106 | html[class] .title-with-logo .brand-text { 107 | font-size: var(--brand-font-size-sm); 108 | letter-spacing: var(--brand-letter-spacing-md); 109 | } 110 | 111 | html[class] .title-with-logo { 112 | margin: 2rem auto 1.5rem; 113 | } 114 | } 115 | 116 | /* Extra small screens */ 117 | @media (max-width: 480px) { 118 | html[class] .title-with-logo .brand-text { 119 | font-size: var(--brand-font-size-xs); 120 | letter-spacing: var(--brand-letter-spacing-sm); 121 | line-height: 1.2; 122 | } 123 | 124 | html[class] .title-with-logo { 125 | margin: 1.5rem auto 1rem; 126 | padding: 0 1rem; 127 | } 128 | } 129 | 130 | /* Smallest screens */ 131 | @media (max-width: 360px) { 132 | html[class] .title-with-logo .brand-text { 133 | font-size: var(--brand-font-size-xxs); 134 | letter-spacing: var(--brand-letter-spacing-xs); 135 | } 136 | } 137 | 138 | /* Preserve existing layout styles */ 139 | #badges img { 140 | margin-top: 0; 141 | margin-bottom: 0; 142 | } 143 | 144 | #badges { 145 | display: flex; 146 | flex-wrap: wrap; 147 | gap: 10px; 148 | margin-bottom: 3em; 149 | } 150 | 
-------------------------------------------------------------------------------- /docs/_static/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | All notable changes to this project will be documented in this file. 5 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from datetime import datetime 3 | from pathlib import Path 4 | 5 | from pytest_databases.__metadata__ import __project__, __version__ 6 | 7 | sys.path.insert(0, str(Path("..").resolve())) 8 | 9 | 10 | # -- Project information ----------------------------------------------------- 11 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 12 | current_year = datetime.now().year # noqa: DTZ005 13 | 14 | project = __project__ 15 | copyright = f"{current_year}, Litestar Organization" # noqa: A001 16 | author = "Litestar Organization" 17 | release = __version__ 18 | 19 | # -- General configuration --------------------------------------------------- 20 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 21 | 22 | extensions = [ 23 | "sphinx.ext.autodoc", 24 | "sphinx.ext.napoleon", 25 | "sphinx.ext.intersphinx", 26 | "sphinx.ext.viewcode", 27 | "sphinx.ext.githubpages", 28 | "sphinx_copybutton", 29 | "sphinx_click", 30 | "sphinx_design", 31 | "auto_pytabs.sphinx_ext", 32 | "myst_parser", 33 | "sphinx_autodoc_typehints", 34 | ] 35 | 36 | templates_path = ["_templates"] 37 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 38 | 39 | # -- Options for HTML output 
------------------------------------------------- 40 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 41 | 42 | html_theme = "shibuya" 43 | html_title = "Pytest Databases" 44 | pygments_style = "dracula" 45 | html_static_path = ["_static"] 46 | html_css_files = ["custom.css"] 47 | html_logo = "_static/logo.svg" 48 | html_favicon = "_static/logo.svg" # Optional: use logo as favicon 49 | 50 | # Shibuya theme options: https://shibuya.lepture.com/install/ 51 | html_theme_options = { 52 | "accent_color": "amber", 53 | "github_url": "https://github.com/litestar-org/pytest-databases", 54 | "discord_url": "https://discord.gg/litestar", 55 | } 56 | 57 | # Autodoc settings 58 | autodoc_default_options = { 59 | "members": True, 60 | "member-order": "bysource", 61 | "special-members": "__init__", 62 | "undoc-members": True, 63 | "exclude-members": "__weakref__", 64 | } 65 | 66 | # Intersphinx settings 67 | intersphinx_mapping = { 68 | "python": ("https://docs.python.org/3", None), 69 | "pytest": ("https://docs.pytest.org/en/latest", None), 70 | } 71 | -------------------------------------------------------------------------------- /docs/contribution-guide.rst: -------------------------------------------------------------------------------- 1 | :orphan: 2 | 3 | .. include:: ../CONTRIBUTING.rst 4 | -------------------------------------------------------------------------------- /docs/getting-started/basic-usage.rst: -------------------------------------------------------------------------------- 1 | Basic Usage 2 | =========== 3 | 4 | Once a plugin is enabled (e.g., PostgreSQL), you can use its fixtures directly in your tests. There are typically two main types of fixtures: 5 | 6 | 1. **Service Fixture** (e.g., `postgres_service`): Provides details about the running database service (host, port, credentials, etc.). Useful for connecting with your own client. 7 | 2. 
**Connection Fixture** (e.g., `postgres_connection`): Provides a ready-to-use connection object (where applicable) to the database service. 8 | 9 | .. code-block:: python 10 | 11 | # Assuming you have installed pytest-databases[postgres] and enabled the plugin 12 | # Also assuming a client like psycopg is installed: pip install psycopg 13 | import psycopg 14 | from pytest_databases.docker.postgres import PostgresService 15 | 16 | # Example using the Service Fixture 17 | def test_connection_with_service_details(postgres_service: PostgresService) -> None: 18 | conn_str = ( 19 | f"postgresql://{postgres_service.user}:{postgres_service.password}@" 20 | f"{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" 21 | ) 22 | with psycopg.connect(conn_str, autocommit=True) as conn: 23 | with conn.cursor() as cursor: 24 | cursor.execute("SELECT 1") 25 | assert cursor.fetchone() == (1,) 26 | 27 | # Example using the Connection Fixture 28 | def test_with_direct_connection(postgres_connection) -> None: 29 | # postgres_connection is often a configured client or connection object 30 | with postgres_connection.cursor() as cursor: 31 | cursor.execute("CREATE TABLE IF NOT EXISTS users (id INT PRIMARY KEY, name TEXT);") 32 | cursor.execute("INSERT INTO users (id, name) VALUES (1, 'Alice');") 33 | cursor.execute("SELECT name FROM users WHERE id = 1;") 34 | assert cursor.fetchone() == ('Alice',) 35 | -------------------------------------------------------------------------------- /docs/getting-started/configuration.rst: -------------------------------------------------------------------------------- 1 | Configuration 2 | ============= 3 | 4 | ``pytest-databases`` uses environment variables for configuration. This allows you to override default settings like Docker image tags, usernames, passwords, ports, and database names. 
5 | 6 | Common Environment Variables 7 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 8 | 9 | These variables apply globally to the Docker setup: 10 | 11 | * ``SKIP_DOCKER_COMPOSE=True``: If set, skip trying to manage database containers via Docker Compose. Useful if you manage services externally. (Default: "False") 12 | * ``USE_LEGACY_DOCKER_COMPOSE=True``: If set, forces the use of the older ``docker-compose`` command instead of ``docker compose``. (Default: "False") 13 | * ``DOCKER_HOST``: Specifies the host where the Docker daemon is running and where services will be exposed. (Default: "127.0.0.1") 14 | 15 | Database-Specific Variables 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 17 | 18 | Some drivers have additional environment variables for configuration. 19 | 20 | Please refer to the documentation for the specific database you are using under the :doc:`../supported-databases/index` section for a complete list of its configuration variables. 21 | 22 | Accessing Configuration in Tests 23 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 24 | 25 | The effective configuration values (whether defaults or overridden by environment variables) are available as attributes on the service fixture objects: 26 | 27 | .. 
code-block:: python 28 | 29 | from pytest_databases.docker.postgres import PostgresService 30 | 31 | def test_postgres_config_access(postgres_service: PostgresService) -> None: 32 | # Access configuration values used by the running service 33 | print(f"Connecting to Postgres user: {postgres_service.user}") 34 | print(f"Using database: {postgres_service.database}") 35 | print(f"On host: {postgres_service.host}:{postgres_service.port}") 36 | 37 | # Example assertions (replace with your expected defaults or env overrides) 38 | assert postgres_service.user == "postgres" 39 | assert postgres_service.password == "super-secret" 40 | assert postgres_service.database == "pytest_databases" 41 | assert postgres_service.host == "127.0.0.1" # Or your DOCKER_HOST override 42 | assert isinstance(postgres_service.port, int) 43 | assert postgres_service.port > 0 44 | -------------------------------------------------------------------------------- /docs/getting-started/enabling-plugins.rst: -------------------------------------------------------------------------------- 1 | Enabling Database Plugins 2 | ========================= 3 | 4 | After installing the necessary extras, you need to tell pytest to load the corresponding plugin(s). Add the plugin path(s) to your pytest configuration. 5 | 6 | 7 | .. code-block:: python 8 | 9 | # Example: Enable PostgreSQL and Redis plugins 10 | pytest_plugins = [ 11 | "pytest_databases.docker.postgres", 12 | "pytest_databases.docker.redis", 13 | ] 14 | -------------------------------------------------------------------------------- /docs/getting-started/index.rst: -------------------------------------------------------------------------------- 1 | Getting Started 2 | =============== 3 | 4 | This guide will help you get started with ``pytest-databases``. We'll cover installation, enabling database plugins, basic usage, and configuration options. 5 | 6 | .. 
toctree:: 7 | :maxdepth: 1 8 | 9 | installation 10 | enabling-plugins 11 | basic-usage 12 | configuration 13 | next-steps 14 | -------------------------------------------------------------------------------- /docs/getting-started/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | First, install the base package using pip: 5 | 6 | .. code-block:: bash 7 | 8 | pip install pytest-databases 9 | 10 | Optional Database Support 11 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 12 | 13 | To use ``pytest-databases`` with specific databases, you need to install optional dependencies using "extras". You can install support for one or multiple databases at once. 14 | 15 | .. list-table:: Available Database Extras 16 | :widths: 25 50 17 | :header-rows: 1 18 | 19 | * - Database 20 | - Installation Extra 21 | * - PostgreSQL 22 | - ``pytest-databases[postgres]`` 23 | * - MySQL 24 | - ``pytest-databases[mysql]`` 25 | * - MariaDB 26 | - ``pytest-databases[mariadb]`` 27 | * - Oracle 28 | - ``pytest-databases[oracle]`` 29 | * - SQL Server 30 | - ``pytest-databases[sqlserver]`` 31 | * - Google AlloyDB Omni 32 | - ``pytest-databases[alloydb]`` 33 | * - Google Spanner 34 | - ``pytest-databases[spanner]`` 35 | * - Google BigQuery 36 | - ``pytest-databases[bigquery]`` 37 | * - CockroachDB 38 | - ``pytest-databases[cockroachdb]`` 39 | * - Redis 40 | - ``pytest-databases[redis]`` 41 | * - Valkey 42 | - ``pytest-databases[valkey]`` 43 | * - Dragonfly 44 | - ``pytest-databases[dragonfly]`` 45 | * - KeyDB 46 | - ``pytest-databases[keydb]`` 47 | * - Elasticsearch 48 | - ``pytest-databases[elasticsearch]`` 49 | * - Azure Blob Storage 50 | - ``pytest-databases[azure]`` 51 | * - MinIO 52 | - ``pytest-databases[minio]`` 53 | 54 | Example installing multiple extras: 55 | 56 | .. 
code-block:: bash 57 | 58 | pip install pytest-databases[postgres,mysql,redis] 59 | -------------------------------------------------------------------------------- /docs/getting-started/next-steps.rst: -------------------------------------------------------------------------------- 1 | Next Steps 2 | ========== 3 | 4 | * Browse the :doc:`../supported-databases/index` section for specifics on each supported database. 5 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | :layout: landing 2 | :description: Reusable test fixtures for any and all databases. 3 | 4 | .. container:: 5 | :name: home-head 6 | 7 | .. container:: 8 | 9 | .. raw:: html 10 | 11 | 14 | 15 | .. container:: badges 16 | :name: badges 17 | 18 | .. image:: https://img.shields.io/pypi/v/pytest-databases?labelColor=202235&color=edb641&logo=python&logoColor=edb641 19 | :alt: PyPI Version 20 | 21 | .. image:: https://img.shields.io/pypi/pyversions/pytest-databases?labelColor=202235&color=edb641&logo=python&logoColor=edb641 22 | :alt: Supported Python Versions 23 | 24 | .. rst-class:: lead 25 | 26 | The pytest-databases library is designed to simplify database testing by providing pre-configured setups for a wide range of database types and versions. 27 | 28 | .. container:: buttons wrap 29 | 30 | .. raw:: html 31 | 32 | Get Started 33 | Usage & API Docs 34 | 35 | .. grid:: 1 1 2 2 36 | :padding: 0 37 | :gutter: 2 38 | 39 | .. grid-item-card:: :octicon:`versions` Changelog 40 | :link: changelog 41 | :link-type: doc 42 | 43 | The latest updates and enhancements to Pytest Databases. 44 | 45 | .. grid-item-card:: :octicon:`issue-opened` Issues 46 | :link: https://github.com/litestar-org/pytest-databases/issues 47 | 48 | Report issues or suggest new features. 49 | 50 | .. 
grid-item-card:: :octicon:`comment-discussion` Discussions 51 | :link: https://github.com/litestar-org/pytest-databases/discussions 52 | 53 | Join discussions, pose questions, or share insights. 54 | 55 | .. grid-item-card:: :octicon:`beaker` Contributing 56 | :link: contribution-guide 57 | :link-type: doc 58 | 59 | Contribute to this project's growth with code, docs, and more. 60 | 61 | .. toctree:: 62 | :caption: Contents: 63 | :hidden: 64 | 65 | getting-started/index 66 | supported-databases/index 67 | 68 | .. toctree:: 69 | :titlesonly: 70 | :caption: Contributing 71 | :hidden: 72 | 73 | changelog 74 | contribution-guide 75 | Available Issues 76 | Code of Conduct 77 | -------------------------------------------------------------------------------- /docs/supported-databases/azure_blob_storage.rst: -------------------------------------------------------------------------------- 1 | Azure Blob Storage 2 | ================== 3 | 4 | Integration with `Azure Blob Storage `_, a cloud-based object storage service. 5 | 6 | This integration uses the official `Azure Storage Blobs Python Client `_ to interact with Azure Blob Storage, which provides scalable object storage for testing and development. 7 | 8 | Installation 9 | ------------ 10 | 11 | .. code-block:: bash 12 | 13 | pip install pytest-databases[azure] 14 | 15 | Configuration 16 | ------------- 17 | 18 | * ``AZURE_STORAGE_CONNECTION_STRING``: Connection string for Azure Blob Storage 19 | * ``AZURE_STORAGE_ACCOUNT_NAME``: Account name for Azure Blob Storage 20 | * ``AZURE_STORAGE_ACCOUNT_KEY``: Account key for Azure Blob Storage 21 | * ``AZURE_STORAGE_CONTAINER_NAME``: Container name for Azure Blob Storage (default: "pytest-databases") 22 | 23 | Usage Example 24 | ------------- 25 | 26 | .. 
code-block:: python 27 | 28 | import pytest 29 | from azure.storage.blob import BlobServiceClient 30 | from pytest_databases.docker.azure_blob import AzureBlobStorageService 31 | pytest_plugins = ["pytest_databases.docker.azure_blob"] 32 | 33 | def test_service(azure_blob_storage_service: AzureBlobStorageService) -> None: 34 | client = BlobServiceClient.from_connection_string( 35 | azure_blob_storage_service.connection_string 36 | ) 37 | container = client.get_container_client(azure_blob_storage_service.container_name) 38 | container.create_container() 39 | assert container.exists() 40 | 41 | def test_client(azure_blob_storage_client: BlobServiceClient) -> None: 42 | container = azure_blob_storage_client.get_container_client("test-container") 43 | container.create_container() 44 | assert container.exists() 45 | 46 | Available Fixtures 47 | ------------------ 48 | 49 | * ``azurite_in_memory``: Whether to use in-memory storage for Azurite (default: ``True``) 50 | * ``azure_blob_service``: A fixture that provides an Azure Blob Storage service. 51 | * ``azure_blob_default_container_name``: The default container name for Azure Blob Storage (default: ``pytest-databases``) 52 | * ``azure_blob_container_client``: A fixture that provides an Azure Blob Storage container client. 53 | * ``azure_blob_async_container_client``: A fixture that provides an Azure Blob Storage container client for async operations. 54 | 55 | Service API 56 | ----------- 57 | 58 | .. 
automodule:: pytest_databases.docker.azure_blob 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/supported-databases/bigquery.rst: -------------------------------------------------------------------------------- 1 | BigQuery 2 | ======== 3 | 4 | Integration with `Google BigQuery `_ using the `BigQuery Emulator `_ 5 | 6 | This integration uses the official `Google Cloud BigQuery Python Client `_ for testing against the BigQuery Emulator. The emulator is a third-party project that provides a local development environment that mimics the behavior of BigQuery, allowing you to test your application without connecting to the actual service. 7 | 8 | Installation 9 | ------------ 10 | 11 | .. code-block:: bash 12 | 13 | pip install pytest-databases[bigquery] 14 | 15 | Usage Example 16 | ------------- 17 | 18 | .. code-block:: python 19 | 20 | import pytest 21 | from google.cloud import bigquery 22 | from pytest_databases.docker.bigquery import BigQueryService 23 | 24 | pytest_plugins = ["pytest_databases.docker.bigquery"] 25 | 26 | def test(bigquery_service: BigQueryService) -> None: 27 | client = bigquery.Client( 28 | project=bigquery_service.project, 29 | client_options=bigquery_service.client_options, 30 | credentials=bigquery_service.credentials, 31 | ) 32 | 33 | job = client.query(query="SELECT 1 as one") 34 | resp = list(job.result()) 35 | assert resp[0].one == 1 36 | 37 | def test(bigquery_client: bigquery.Client) -> None: 38 | assert isinstance(bigquery_client, bigquery.Client) 39 | 40 | Available Fixtures 41 | ------------------ 42 | 43 | * ``bigquery_image``: The Docker image to use for BigQuery. 44 | * ``bigquery_service``: A fixture that provides a BigQuery service. 45 | * ``bigquery_client``: A fixture that provides a BigQuery client. 46 | 47 | Service API 48 | ----------- 49 | 50 | .. 
automodule:: pytest_databases.docker.bigquery 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | -------------------------------------------------------------------------------- /docs/supported-databases/cockroachdb.rst: -------------------------------------------------------------------------------- 1 | CockroachDB 2 | =========== 3 | 4 | Integration with `CockroachDB `_ 5 | 6 | Installation 7 | ------------ 8 | 9 | .. code-block:: bash 10 | 11 | pip install pytest-databases[cockroachdb] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. code-block:: python 17 | 18 | import pytest 19 | import psycopg 20 | from pytest_databases.docker.cockroachdb import CockroachDBService 21 | 22 | pytest_plugins = ["pytest_databases.docker.cockroachdb"] 23 | 24 | @pytest.fixture(scope="session") 25 | def cockroach_uri(cockroachdb_service: CockroachDBService) -> str: 26 | opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items()) 27 | return f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}" 28 | 29 | def test(cockroach_uri: str) -> None: 30 | with psycopg.connect(cockroach_uri) as conn: 31 | db_open = conn.execute("SELECT 1").fetchone() 32 | assert db_open is not None and db_open[0] == 1 33 | 34 | def test(cockroachdb_connection: psycopg.Connection) -> None: 35 | cockroachdb_connection.execute("CREATE TABLE if not exists simple_table as SELECT 1") 36 | result = cockroachdb_connection.execute("select * from simple_table").fetchone() 37 | assert result is not None and result[0] == 1 38 | 39 | Available Fixtures 40 | ------------------ 41 | 42 | * ``cockroachdb_image``: The Docker image to use for CockroachDB. 43 | * ``cockroachdb_service``: A fixture that provides a CockroachDB service. 44 | * ``cockroachdb_connection``: A fixture that provides a CockroachDB connection. 45 | * ``cockroachdb_driver_opts``: A fixture that provides driver options for CockroachDB. 
46 | 47 | Service API 48 | ----------- 49 | 50 | .. automodule:: pytest_databases.docker.cockroachdb 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | -------------------------------------------------------------------------------- /docs/supported-databases/elasticsearch.rst: -------------------------------------------------------------------------------- 1 | Elasticsearch 2 | ============= 3 | 4 | Integration with `Elasticsearch `_ 5 | 6 | The following Docker images are supported: 7 | 8 | * `Elasticsearch Docker Images `_ 9 | 10 | 11 | Installation 12 | ------------ 13 | 14 | For Elasticsearch 7.x: 15 | 16 | .. code-block:: bash 17 | 18 | pip install pytest-databases[elasticsearch7] 19 | 20 | For Elasticsearch 8.x: 21 | 22 | .. code-block:: bash 23 | 24 | pip install pytest-databases[elasticsearch8] 25 | 26 | 27 | Usage Example 28 | ------------- 29 | 30 | For Elasticsearch 7.x: 31 | 32 | .. code-block:: python 33 | 34 | import pytest 35 | from elasticsearch7 import Elasticsearch 36 | from pytest_databases.docker.elastic_search import ElasticsearchService 37 | 38 | pytest_plugins = ["pytest_databases.docker.elastic_search"] 39 | 40 | def test(elasticsearch_7_service: ElasticsearchService) -> None: 41 | with Elasticsearch( 42 | hosts=[ 43 | { 44 | "host": elasticsearch_7_service.host, 45 | "port": elasticsearch_7_service.port, 46 | "scheme": elasticsearch_7_service.scheme, 47 | } 48 | ], 49 | verify_certs=False, 50 | http_auth=(elasticsearch_7_service.user, elasticsearch_7_service.password), 51 | ) as client: 52 | info = client.info() 53 | assert info["version"]["number"] == "7.17.19" 54 | 55 | For Elasticsearch 8.x: 56 | 57 | .. 
code-block:: python 58 | 59 | import pytest 60 | from elasticsearch8 import Elasticsearch 61 | from pytest_databases.docker.elastic_search import ElasticsearchService 62 | 63 | pytest_plugins = ["pytest_databases.docker.elastic_search"] 64 | 65 | def test(elasticsearch_8_service: ElasticsearchService) -> None: 66 | with Elasticsearch( 67 | hosts=[ 68 | { 69 | "host": elasticsearch_8_service.host, 70 | "port": elasticsearch_8_service.port, 71 | "scheme": elasticsearch_8_service.scheme, 72 | } 73 | ], 74 | verify_certs=False, 75 | basic_auth=(elasticsearch_8_service.user, elasticsearch_8_service.password), 76 | ) as client: 77 | info = client.info() 78 | assert info["version"]["number"] == "8.13.0" 79 | 80 | Available Fixtures 81 | ------------------ 82 | 83 | * ``elasticsearch_service_memory_limit``: The memory limit for the Elasticsearch service (default: ``500m``) 84 | * ``elasticsearch_service``: A fixture that provides an Elasticsearch service. 85 | 86 | The following version-specific fixtures are also available: 87 | 88 | * ``elasticsearch_7_service``: Elasticsearch 7.x 89 | * ``elasticsearch_8_service``: Elasticsearch 8.x 90 | 91 | Service API 92 | ----------- 93 | 94 | .. automodule:: pytest_databases.docker.elastic_search 95 | :members: 96 | :undoc-members: 97 | :show-inheritance: 98 | -------------------------------------------------------------------------------- /docs/supported-databases/index.rst: -------------------------------------------------------------------------------- 1 | Supported Databases 2 | =================== 3 | 4 | This section provides detailed information on the supported databases, including installation, configuration, available services, and fixtures. 5 | 6 | .. 
toctree:: 7 | :maxdepth: 1 8 | 9 | postgres 10 | mysql 11 | mariadb 12 | oracle 13 | sqlserver 14 | spanner 15 | bigquery 16 | cockroachdb 17 | redis 18 | valkey 19 | elasticsearch 20 | azure_blob_storage 21 | minio 22 | -------------------------------------------------------------------------------- /docs/supported-databases/mariadb.rst: -------------------------------------------------------------------------------- 1 | MariaDB 2 | ======= 3 | 4 | Integration with `MariaDB `_, a community-developed, commercially supported fork of the MySQL relational database management system. 5 | 6 | This integration uses the official `MariaDB Python Connector `_ to interact with MariaDB. 7 | 8 | Installation 9 | ------------ 10 | 11 | .. code-block:: bash 12 | 13 | pip install pytest-databases[mariadb] 14 | 15 | 16 | Usage Example 17 | ------------- 18 | 19 | .. code-block:: python 20 | 21 | import pytest 22 | import mariadb 23 | from pytest_databases.docker.mariadb import MariaDBService 24 | 25 | pytest_plugins = ["pytest_databases.docker.mariadb"] 26 | 27 | def test(mariadb_service: MariaDBService) -> None: 28 | with mariadb.connect( 29 | host=mariadb_service.host, 30 | port=mariadb_service.port, 31 | user=mariadb_service.user, 32 | database=mariadb_service.db, 33 | password=mariadb_service.password, 34 | ) as conn, conn.cursor() as cursor: 35 | cursor.execute("select 1 as is_available") 36 | resp = cursor.fetchone() 37 | assert resp is not None and resp[0] == 1 38 | 39 | def test(mariadb_connection: mariadb.Connection) -> None: 40 | with mariadb_connection.cursor() as cursor: 41 | cursor.execute("CREATE TABLE if not exists simple_table as SELECT 1 as the_value") 42 | cursor.execute("select * from simple_table") 43 | result = cursor.fetchall() 44 | assert result is not None and result[0][0] == 1 45 | 46 | Available Fixtures 47 | ------------------ 48 | 49 | * ``mariadb_service``: A fixture that provides a MariaDB service. 
50 | * ``mariadb_connection``: A fixture that provides a MariaDB connection. 51 | 52 | The following version-specific fixtures are also available: 53 | 54 | * ``mariadb_113_service``: A fixture that provides a MariaDB 11.3 service. 55 | * ``mariadb_113_connection``: A fixture that provides a MariaDB 11.3 connection. 56 | 57 | 58 | Service API 59 | ----------- 60 | 61 | .. automodule:: pytest_databases.docker.mariadb 62 | :members: 63 | :undoc-members: 64 | :show-inheritance: 65 | -------------------------------------------------------------------------------- /docs/supported-databases/minio.rst: -------------------------------------------------------------------------------- 1 | MinIO 2 | ===== 3 | 4 | Integration with `MinIO `_, an S3-compatible object storage service. 5 | 6 | This integration uses the official `MinIO Python Client `_ to interact with MinIO, which provides S3-compatible object storage for testing and development. 7 | 8 | Installation 9 | ------------ 10 | 11 | .. code-block:: bash 12 | 13 | pip install pytest-databases[minio] 14 | 15 | Docker Image 16 | ------------ 17 | 18 | `Official MinIO Docker Image `_ 19 | 20 | Configuration 21 | ------------- 22 | 23 | * ``MINIO_ACCESS_KEY``: Access key for MinIO (default: "minio") 24 | * ``MINIO_SECRET_KEY``: Secret key for MinIO (default: "minio123") 25 | * ``MINIO_SECURE``: Whether to use HTTPS (default: "false") 26 | 27 | Usage Example 28 | ------------- 29 | 30 | .. 
code-block:: python 31 | 32 | import pytest 33 | from minio import Minio 34 | from pytest_databases.docker.minio import MinioService 35 | 36 | pytest_plugins = ["pytest_databases.docker.minio"] 37 | 38 | def test(minio_service: MinioService) -> None: 39 | client = Minio( 40 | endpoint=minio_service.endpoint, 41 | access_key=minio_service.access_key, 42 | secret_key=minio_service.secret_key, 43 | secure=minio_service.secure, 44 | ) 45 | client.make_bucket("test-bucket") 46 | assert client.bucket_exists("test-bucket") 47 | 48 | def test(minio_client: Minio) -> None: 49 | minio_client.make_bucket("test-bucket") 50 | assert minio_client.bucket_exists("test-bucket") 51 | 52 | Available Fixtures 53 | ------------------ 54 | 55 | * ``minio_access_key``: The access key for MinIO defaults to os.getenv("MINIO_ACCESS_KEY", "minio"). 56 | * ``minio_secret_key``: The secret key for MinIO defaults to os.getenv("MINIO_SECRET_KEY", "minio123"). 57 | * ``minio_secure``: Whether to use HTTPS for MinIO defaults to os.getenv("MINIO_SECURE", "false"). 58 | * ``minio_service``: A fixture that provides a MinIO service. 59 | * ``minio_client``: A fixture that provides a MinIO client. 60 | * ``minio_default_bucket_name``: A fixture that provides the default bucket name. 61 | 62 | Service API 63 | ----------- 64 | 65 | .. automodule:: pytest_databases.docker.minio 66 | :members: 67 | :undoc-members: 68 | :show-inheritance: 69 | -------------------------------------------------------------------------------- /docs/supported-databases/mysql.rst: -------------------------------------------------------------------------------- 1 | MySQL 2 | ===== 3 | 4 | Integration with `MySQL `_ 5 | 6 | Installation 7 | ------------ 8 | 9 | .. code-block:: bash 10 | 11 | pip install pytest-databases[mysql] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. 
code-block:: python 17 | 18 | import pytest 19 | import mysql.connector 20 | from pytest_databases.docker.mysql import MySQLService 21 | 22 | pytest_plugins = ["pytest_databases.docker.mysql"] 23 | 24 | def test(mysql_service: MySQLService) -> None: 25 | with mysql.connector.connect( 26 | host=mysql_service.host, 27 | port=mysql_service.port, 28 | user=mysql_service.user, 29 | database=mysql_service.db, 30 | password=mysql_service.password, 31 | ) as conn, conn.cursor() as cursor: 32 | cursor.execute("select 1 as is_available") 33 | resp = cursor.fetchone() 34 | assert resp is not None and resp[0] == 1 35 | 36 | def test(mysql_connection: mysql.connector.MySQLConnection) -> None: 37 | with mysql_connection.cursor() as cursor: 38 | cursor.execute("CREATE TABLE if not exists simple_table as SELECT 1 as the_value") 39 | cursor.execute("select * from simple_table") 40 | result = cursor.fetchall() 41 | assert result is not None and result[0][0] == 1 42 | 43 | Available Fixtures 44 | ------------------ 45 | 46 | * ``mysql_service``: A fixture that provides a MySQL service (latest version). 47 | * ``mysql_connection``: A fixture that provides a MySQL connection. 48 | 49 | The following version-specific fixtures are also available: 50 | 51 | * ``mysql_56_service``, ``mysql_56_connection``: MySQL 5.6 52 | * ``mysql_57_service``, ``mysql_57_connection``: MySQL 5.7 53 | * ``mysql_8_service``, ``mysql_8_connection``: MySQL 8.x 54 | 55 | Service API 56 | ----------- 57 | 58 | .. automodule:: pytest_databases.docker.mysql 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/supported-databases/oracle.rst: -------------------------------------------------------------------------------- 1 | Oracle 2 | ====== 3 | 4 | Integration with `Oracle Database `_ 5 | 6 | Installation 7 | ------------ 8 | 9 | .. 
code-block:: bash 10 | 11 | pip install pytest-databases[oracle] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. code-block:: python 17 | 18 | import pytest 19 | import oracledb 20 | from pytest_databases.docker.oracle import OracleService 21 | 22 | pytest_plugins = ["pytest_databases.docker.oracle"] 23 | 24 | def test(oracle_service: OracleService) -> None: 25 | conn = oracledb.connect( 26 | user=oracle_service.user, 27 | password=oracle_service.password, 28 | service_name=oracle_service.service_name, 29 | host=oracle_service.host, 30 | port=oracle_service.port, 31 | ) 32 | with conn.cursor() as cur: 33 | cur.execute("SELECT 1 FROM dual") 34 | res = cur.fetchone()[0] 35 | assert res == 1 36 | 37 | def test(oracle_startup_connection: oracledb.Connection) -> None: 38 | with oracle_startup_connection.cursor() as cursor: 39 | cursor.execute("CREATE or replace view simple_table as SELECT 1 as the_value from dual") 40 | cursor.execute("select * from simple_table") 41 | result = cursor.fetchall() 42 | assert result is not None and result[0][0] == 1 43 | 44 | Available Fixtures 45 | ------------------ 46 | 47 | * ``oracle_image``: The Docker image to use for Oracle. 48 | * ``oracle_service``: A fixture that provides an Oracle service. 49 | * ``oracle_startup_connection``: A fixture that provides an Oracle connection. 50 | 51 | The following version-specific fixtures are also available: 52 | 53 | * ``oracle_18c_image``, ``oracle_18c_service_name``, ``oracle_18c_service``, ``oracle_18c_connection``: Oracle 18c 54 | * ``oracle_23ai_image``, ``oracle_23ai_service_name``, ``oracle_23ai_service``, ``oracle_23ai_connection``: Oracle 23ai 55 | 56 | Service API 57 | ----------- 58 | 59 | .. 
automodule:: pytest_databases.docker.oracle 60 | :members: 61 | :undoc-members: 62 | :show-inheritance: 63 | -------------------------------------------------------------------------------- /docs/supported-databases/postgres.rst: -------------------------------------------------------------------------------- 1 | PostgreSQL 2 | ========== 3 | 4 | Integration with `PostgreSQL `_ using the `PostgreSQL Docker Image `_, Google's `AlloyDB Omni `_ or `pgvector Docker Image `_ 5 | 6 | Installation 7 | ------------ 8 | 9 | .. code-block:: bash 10 | 11 | pip install pytest-databases[postgres] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. code-block:: python 17 | 18 | import pytest 19 | import psycopg 20 | from pytest_databases.docker.postgres import PostgresService 21 | 22 | pytest_plugins = ["pytest_databases.docker.postgres"] 23 | 24 | def test(postgres_service: PostgresService) -> None: 25 | with psycopg.connect( 26 | f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" 27 | ) as conn: 28 | db_open = conn.execute("SELECT 1").fetchone() 29 | assert db_open is not None and db_open[0] == 1 30 | 31 | def test(postgres_connection: psycopg.Connection) -> None: 32 | postgres_connection.execute("CREATE TABLE if not exists simple_table as SELECT 1") 33 | result = postgres_connection.execute("select * from simple_table").fetchone() 34 | assert result is not None and result[0] == 1 35 | 36 | Available Fixtures 37 | ------------------ 38 | 39 | * ``postgres_user``: The PostgreSQL user. 40 | * ``postgres_password``: The PostgreSQL password. 41 | * ``postgres_database``: The PostgreSQL database name to use. 42 | * ``postgres_image``: The Docker image to use for PostgreSQL. 43 | * ``postgres_service``: A fixture that provides a PostgreSQL service. 44 | * ``postgres_connection``: A fixture that provides a PostgreSQL connection. 
45 | 46 | The following version-specific fixtures are also available: 47 | 48 | * ``alloydb_omni_image``, ``alloydb_omni_service``, ``alloydb_omni_connection``: Latest Available AlloyDB Omni 16 Docker image. 49 | * ``postgres_11_image``, ``postgres_11_service``, ``postgres_11_connection``: PostgreSQL 11.x 50 | * ``postgres_12_image``, ``postgres_12_service``, ``postgres_12_connection``: PostgreSQL 12.x 51 | * ``postgres_13_image``, ``postgres_13_service``, ``postgres_13_connection``: PostgreSQL 13.x 52 | * ``postgres_14_image``, ``postgres_14_service``, ``postgres_14_connection``: PostgreSQL 14.x 53 | * ``postgres_15_image``, ``postgres_15_service``, ``postgres_15_connection``: PostgreSQL 15.x 54 | * ``postgres_16_image``, ``postgres_16_service``, ``postgres_16_connection``: PostgreSQL 16.x 55 | * ``postgres_17_image``, ``postgres_17_service``, ``postgres_17_connection``: PostgreSQL 17.x 56 | * ``pgvector_image``, ``pgvector_service``. ``pgvector_connection``: Latest Available pgvector Docker image. 57 | 58 | 59 | Service API 60 | ----------- 61 | 62 | .. automodule:: pytest_databases.docker.postgres 63 | :members: 64 | :undoc-members: 65 | :show-inheritance: 66 | -------------------------------------------------------------------------------- /docs/supported-databases/redis.rst: -------------------------------------------------------------------------------- 1 | Redis 2 | ===== 3 | 4 | Integration with `Redis `_ using the `Redis Docker Image `_, Snap's `Key DB` or `Dragonfly `_. 5 | 6 | Installation 7 | ------------ 8 | 9 | .. code-block:: bash 10 | 11 | pip install pytest-databases[redis] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. 
code-block:: python 17 | 18 | import pytest 19 | import redis 20 | from pytest_databases.docker.redis import RedisService 21 | 22 | pytest_plugins = ["pytest_databases.docker.redis"] 23 | 24 | def test(redis_service: RedisService) -> None: 25 | client = redis.Redis( 26 | host=redis_service.host, 27 | port=redis_service.port, 28 | db=redis_service.db 29 | ) 30 | client.set("test_key", "test_value") 31 | assert client.get("test_key") == b"test_value" 32 | 33 | def test(redis_connection: redis.Redis) -> None: 34 | redis_connection.set("test_key", "test_value") 35 | assert redis_connection.get("test_key") == b"test_value" 36 | 37 | Available Fixtures 38 | ------------------ 39 | 40 | * ``redis_port``: The port number for the Redis service. 41 | * ``redis_host``: The host name for the Redis service. 42 | * ``redis_image``: The Docker image to use for Redis. 43 | * ``redis_service``: A fixture that provides a Redis service. 44 | * ``redis_connection``: A fixture that provides a Redis connection. 45 | 46 | The following version-specific fixtures are also available: 47 | 48 | * ``dragonflydb_port``, ``dragonflydb_host``, ``dragonflydb_image``, ``dragonflydb_service``, ``dragonflydb_connection``: Latest Available DragonflyDB Docker image. 49 | * ``keydb_port``, ``keydb_host``, ``keydb_image``, ``keydb_service``, ``keydb_connection``: Latest Available KeyDB Docker image. 50 | 51 | Service API 52 | ----------- 53 | 54 | .. automodule:: pytest_databases.docker.redis 55 | :members: 56 | :undoc-members: 57 | :show-inheritance: 58 | -------------------------------------------------------------------------------- /docs/supported-databases/spanner.rst: -------------------------------------------------------------------------------- 1 | Spanner 2 | ======= 3 | 4 | Integration with `Google Cloud Spanner `_ using the `Spanner Emulator `_ 5 | 6 | This integration uses the official `Google Cloud Spanner Python Client `_ for testing against the Spanner Emulator. 
The emulator provides a local development environment that mimics the behavior of Cloud Spanner, allowing you to test your application without connecting to the actual service. 7 | 8 | Installation 9 | ------------ 10 | 11 | .. code-block:: bash 12 | 13 | pip install pytest-databases[spanner] 14 | 15 | Usage Example 16 | ------------- 17 | 18 | .. code-block:: python 19 | 20 | import pytest 21 | from google.cloud import spanner 22 | import contextlib 23 | from pytest_databases.docker.spanner import SpannerService 24 | 25 | pytest_plugins = ["pytest_databases.docker.spanner"] 26 | 27 | def test(spanner_service: SpannerService) -> None: 28 | spanner_client = spanner.Client( 29 | project=spanner_service.project, 30 | credentials=spanner_service.credentials, 31 | client_options=spanner_service.client_options, 32 | ) 33 | instance = spanner_client.instance(spanner_service.instance_name) 34 | with contextlib.suppress(Exception): 35 | instance.create() 36 | 37 | database = instance.database(spanner_service.database_name) 38 | with contextlib.suppress(Exception): 39 | database.create() 40 | 41 | with database.snapshot() as snapshot: 42 | resp = next(iter(snapshot.execute_sql("SELECT 1"))) 43 | assert resp[0] == 1 44 | 45 | def test(spanner_connection: spanner.Client) -> None: 46 | assert isinstance(spanner_connection, spanner.Client) 47 | 48 | Available Fixtures 49 | ------------------ 50 | 51 | * ``spanner_image``: The Docker image to use for Spanner. 52 | * ``spanner_service``: A fixture that provides a Spanner service. 53 | * ``spanner_connection``: A fixture that provides a Spanner connection. 54 | 55 | Service API 56 | ----------- 57 | 58 | .. 
automodule:: pytest_databases.docker.spanner 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/supported-databases/sqlserver.rst: -------------------------------------------------------------------------------- 1 | SQL Server 2 | ========== 3 | 4 | Integration with `Microsoft SQL Server `_ using the `Microsoft SQL Server Docker Image `_ 5 | 6 | Installation 7 | ------------ 8 | 9 | .. code-block:: bash 10 | 11 | pip install pytest-databases[mssql] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. code-block:: python 17 | 18 | import pytest 19 | import pymssql 20 | from pytest_databases.docker.mssql import MSSQLService 21 | 22 | pytest_plugins = ["pytest_databases.docker.mssql"] 23 | 24 | def test(mssql_service: MSSQLService) -> None: 25 | conn = pymssql.connect( 26 | host=mssql_service.host, 27 | port=str(mssql_service.port), 28 | database=mssql_service.database, 29 | user=mssql_service.user, 30 | password=mssql_service.password, 31 | timeout=2, 32 | ) 33 | with conn.cursor() as cursor: 34 | cursor.execute("select 1 as is_available") 35 | resp = cursor.fetchone() 36 | assert resp is not None and resp[0] == 1 37 | 38 | def test(mssql_connection: pymssql.Connection) -> None: 39 | with mssql_connection.cursor() as cursor: 40 | cursor.execute("CREATE view simple_table as SELECT 1 as the_value") 41 | cursor.execute("select * from simple_table") 42 | result = cursor.fetchall() 43 | assert result is not None and result[0][0] == 1 44 | cursor.execute("drop view simple_table") 45 | 46 | Available Fixtures 47 | ------------------ 48 | 49 | * ``mssql_image``: The Docker image to use for SQL Server. 50 | * ``mssql_service``: A fixture that provides a SQL Server service. 51 | * ``mssql_connection``: A fixture that provides a SQL Server connection. 52 | 53 | Service API 54 | ----------- 55 | 56 | .. 
automodule:: pytest_databases.docker.mssql 57 | :members: 58 | :undoc-members: 59 | :show-inheritance: 60 | -------------------------------------------------------------------------------- /docs/supported-databases/valkey.rst: -------------------------------------------------------------------------------- 1 | Valkey 2 | ====== 3 | 4 | Integration with `Valkey `_ using the `Valkey Docker Image `_ 5 | 6 | Installation 7 | ------------ 8 | 9 | .. code-block:: bash 10 | 11 | pip install pytest-databases[valkey] 12 | 13 | Usage Example 14 | ------------- 15 | 16 | .. code-block:: python 17 | 18 | import pytest 19 | from valkey import Valkey 20 | from pytest_databases.docker.valkey import ValkeyService 21 | 22 | pytest_plugins = ["pytest_databases.docker.valkey"] 23 | 24 | def test(valkey_service: ValkeyService) -> None: 25 | client = Valkey( 26 | host=valkey_service.host, 27 | port=valkey_service.port, 28 | db=valkey_service.db 29 | ) 30 | client.set("test_key", "test_value") 31 | assert client.get("test_key") == b"test_value" 32 | 33 | def test(valkey_connection: Valkey) -> None: 34 | valkey_connection.set("test_key", "test_value") 35 | assert valkey_connection.get("test_key") == b"test_value" 36 | 37 | Available Fixtures 38 | ------------------ 39 | 40 | * ``valkey_port``: The port number for the Valkey service. 41 | * ``valkey_host``: The host name for the Valkey service. 42 | * ``valkey_image``: The Docker image to use for Valkey. 43 | * ``valkey_service``: A fixture that provides a Valkey service. 44 | * ``valkey_connection``: A fixture that provides a Valkey connection. 45 | 46 | Service API 47 | ----------- 48 | 49 | .. 
automodule:: pytest_databases.docker.valkey 50 | :members: 51 | :undoc-members: 52 | :show-inheritance: 53 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "hatchling.build" 3 | requires = ["hatchling"] 4 | 5 | #################### 6 | # Project Metadata # 7 | #################### 8 | 9 | [project] 10 | description = 'Reusable database fixtures for any and all databases.' 11 | license = "MIT" 12 | name = "pytest-databases" 13 | readme = "README.md" 14 | requires-python = ">=3.9" 15 | version = "0.13.0" 16 | # 17 | authors = [{ name = "Cody Fincher", email = "cody@litestar.dev" }] 18 | keywords = [ 19 | "database", 20 | "migration", 21 | "postgres", 22 | "mysql", 23 | "oracle", 24 | "mssql", 25 | "duckdb", 26 | "bigquery", 27 | "spanner", 28 | "alloydb", 29 | "alloydbomni", 30 | "cockroachdb", 31 | "redis", 32 | "elasticsearch", 33 | "azure", 34 | "valkey", 35 | "dragonflydb", 36 | ] 37 | # options under https://pypi.org/classifiers/ 38 | classifiers = [ 39 | "Development Status :: 4 - Beta", 40 | "Programming Language :: Python", 41 | "Programming Language :: Python :: 3.9", 42 | "Programming Language :: Python :: 3.10", 43 | "Programming Language :: Python :: 3.11", 44 | "Programming Language :: Python :: 3.12", 45 | "Programming Language :: Python :: 3.13", 46 | "Programming Language :: Python :: Implementation :: CPython", 47 | "Programming Language :: Python :: Implementation :: PyPy", 48 | ] 49 | # direct dependencies of this package 50 | dependencies = ["pytest", "filelock", "docker"] 51 | 52 | [project.urls] 53 | Documentation = "https://github.com/litestar-org/pytest-databases#readme" 54 | Issues = "https://github.com/litestar-org/pytest-databases/issues" 55 | Source = "https://github.com/litestar-org/pytest-databases" 56 | 57 | [project.optional-dependencies] 58 | azure-storage = 
["azure-storage-blob"] 59 | bigquery = ["google-cloud-bigquery"] 60 | cockroachdb = ["psycopg"] 61 | dragonfly = ["redis"] 62 | elasticsearch7 = ["elasticsearch7"] 63 | elasticsearch8 = ["elasticsearch8"] 64 | keydb = ["redis"] 65 | mariadb = ["mariadb"] 66 | minio = ["minio"] 67 | mssql = ["pymssql"] 68 | mysql = ["mysql-connector-python"] 69 | oracle = ["oracledb"] 70 | postgres = ["psycopg>=3"] 71 | redis = ["redis"] 72 | spanner = ["google-cloud-spanner"] 73 | valkey = ["valkey"] 74 | 75 | [dependency-groups] 76 | dev = [ 77 | # tests 78 | "bump-my-version", 79 | "pytest-databases[azure-storage,bigquery,cockroachdb,dragonfly,elasticsearch7,elasticsearch8,keydb,mssql,mysql,mariadb,oracle,postgres,redis,spanner,minio,valkey]", 80 | "coverage[toml]>=6.2", 81 | "pytest", 82 | "pytest-cov", 83 | "pytest-cdist>=0.2", 84 | "pytest-mock", 85 | "pytest-click", 86 | "pytest-xdist", 87 | "pytest-sugar", 88 | "slotscheck", 89 | "psycopg-binary", # This fixes tests failing on M series CPUs. 90 | # lint 91 | "mypy", 92 | "ruff", 93 | "pyright", 94 | "pre-commit", 95 | "types-click", 96 | "types-six", 97 | "types-decorator", 98 | "types-pyyaml", 99 | "types-docutils", 100 | "types-redis", 101 | "types-pymysql", 102 | # docs 103 | "auto-pytabs[sphinx]>=0.5.0", 104 | "shibuya", 105 | "sphinx>=7.0.0; python_version <= \"3.9\"", 106 | "sphinx>=8.0.0; python_version >= \"3.10\"", 107 | "sphinx-autobuild>=2021.3.14", 108 | "sphinx-copybutton>=0.5.2", 109 | "sphinx-click>=6.0.0", 110 | "sphinx-design>=0.5.0", 111 | "sphinxcontrib-mermaid>=0.9.2", 112 | "sphinx-paramlinks>=0.6.0", 113 | "sphinx-togglebutton>=0.3.2", 114 | "sphinx-toolbox>=3.8.1", 115 | "myst-parser", 116 | "sphinx-autodoc-typehints", 117 | "sphinx-rtd-theme", 118 | ] 119 | 120 | ################## 121 | # External Tools # 122 | ################## 123 | 124 | [tool.bumpversion] 125 | allow_dirty = true 126 | commit = true 127 | commit_args = "--no-verify" 128 | current_version = "0.13.0" 129 | ignore_missing_files = 
false 130 | ignore_missing_version = false 131 | message = "chore(release): bump to v{new_version}" 132 | parse = "(?P\\d+)\\.(?P\\d+)\\.(?P\\d+)" 133 | regex = false 134 | replace = "{new_version}" 135 | search = "{current_version}" 136 | serialize = ["{major}.{minor}.{patch}"] 137 | sign_tags = false 138 | tag = false 139 | tag_message = "chore(release): v{new_version}" 140 | tag_name = "v{new_version}" 141 | 142 | [[tool.bumpversion.files]] 143 | filename = "pyproject.toml" 144 | replace = 'version = "{new_version}"' 145 | search = 'version = "{current_version}"' 146 | 147 | [[tool.bumpversion.files]] 148 | filename = "uv.lock" 149 | replace = """ 150 | name = "pytest-databases" 151 | version = "{new_version}" 152 | """ 153 | search = """ 154 | name = "pytest-databases" 155 | version = "{current_version}" 156 | """ 157 | ## Linting Tools 158 | 159 | [tool.slotscheck] 160 | strict-imports = false 161 | 162 | [tool.codespell] 163 | ignore-words-list = "alog" 164 | skip = 'uv.lock, package-lock.json' 165 | 166 | [tool.pyright] 167 | exclude = ["scripts", "docs"] 168 | include = ["src/pytest_databases", "tests"] 169 | 170 | 171 | [tool.mypy] 172 | disallow_untyped_defs = false 173 | files = ["src/pytest_databases", "tests"] 174 | follow_imports = "normal" # "silent" for not following 175 | ignore_missing_imports = true 176 | pretty = true 177 | show_column_numbers = true 178 | warn_no_return = false 179 | warn_unused_ignores = true 180 | 181 | [[tool.mypy.overrides]] 182 | disable_error_code = "attr-defined" 183 | module = "pytest_databases.docker.spanner" 184 | 185 | [[tool.mypy.overrides]] 186 | disable_error_code = "attr-defined" 187 | module = "pytest_databases.docker.bigquery" 188 | 189 | [[tool.mypy.overrides]] 190 | disable_error_code = "attr-defined" 191 | disallow_untyped_decorators = false 192 | module = "tests.*" 193 | warn_unused_ignores = false 194 | 195 | [[tool.mypy.overrides]] 196 | disable_error_code = ["arg-type"] 197 | disallow_untyped_calls = 
false 198 | disallow_untyped_decorators = false 199 | module = ["docutils.nodes.*"] 200 | 201 | 202 | [[tool.mypy.overrides]] 203 | ignore_missing_imports = true 204 | module = ["pyodbc", "google.auth.*", "google.cloud.*", "google.protobuf.*", "googleapiclient", "googleapiclient.*"] 205 | 206 | [tool.ruff] 207 | exclude = [ 208 | ".bzr", 209 | ".direnv", 210 | ".eggs", 211 | ".git", 212 | ".hg", 213 | ".mypy_cache", 214 | ".nox", 215 | ".pants.d", 216 | ".ruff_cache", 217 | ".svn", 218 | ".tox", 219 | ".venv", 220 | "__pypackages__", 221 | "_build", 222 | "buck-out", 223 | "build", 224 | "dist", 225 | "node_modules", 226 | "venv", 227 | '__pycache__', 228 | ] 229 | fix = true 230 | include = ["src/**/*.py", "src/**/*.pyi", "tests/**/*.py", "tests/**/*.pyi"] 231 | line-length = 120 232 | lint.fixable = ["ALL"] 233 | lint.ignore = [ 234 | "B027", # Allow non-abstract empty methods in abstract base classes 235 | "FBT003", # Allow boolean positional values in function calls, like `dict.get(... 
True)` 236 | # Ignore checks for possible passwords 237 | "S105", 238 | "S106", 239 | "S107", 240 | # Ignore complexity 241 | "C901", 242 | "PLR0911", 243 | "PLR0912", 244 | "PLR0913", 245 | "PLR0915", 246 | "PLC1901", # empty string comparisons 247 | "PLW2901", # `for` loop variable overwritten 248 | "SIM114", # Combine `if` branches using logical `or` operator 249 | "E203", # Whitespace before :, needed for black compatability and also `ruff format` 250 | "ISC001", # causes unexpected behaviour with formatter 251 | "E501", # pycodestyle line too long, handled by black 252 | "D100", # pydocstyle - missing docstring in public module 253 | "D101", # pydocstyle - missing docstring in public class 254 | "D102", # pydocstyle - missing docstring in public method 255 | "D103", # pydocstyle - missing docstring in public function 256 | "D104", # pydocstyle - missing docstring in public package 257 | "D105", # pydocstyle - missing docstring in magic method 258 | "D106", # pydocstyle - missing docstring in public nested class 259 | "D107", # pydocstyle - missing docstring in __init__ 260 | "D202", # pydocstyle - no blank lines allowed after function docstring 261 | "D205", # pydocstyle - 1 blank line required between summary line and description 262 | "D415", # pydocstyle - first line should end with a period, question mark, or exclamation point 263 | "UP037", # pyupgrade - removes quotes from type annotation 264 | "A003", # flake8-builtins - class attribute {name} is shadowing a python builtin 265 | "B010", # flake8-bugbear - do not call setattr with a constant attribute value 266 | "B008", # flake8-bugbear - Do not perform function call `Parameter` in argument defaultsRuff(B008) 267 | "RUF012", # ruff - mutable class attributes should be annotated with `typing.ClassVar` 268 | "ANN401", # ruff - Dynamically typed expressions (typing.Any) are disallowed 269 | "PLR0913", # ruff - Too many arguments to function call 270 | "PLR2004", # Magic value used in comparison 271 | 
"FBT001", # Boolean typed positional argument in function definition 272 | "FBT002", # Boolean default positional argument in function definition 273 | "FBT003", # Boolean Boolean default positional argument in function definition 274 | "ARG002", # Unused method argument 275 | "ARG001", # Unused function argument 276 | "TD002", 277 | "TD003", 278 | "FIX002", 279 | "PGH003", 280 | "RUF006", 281 | "RUF029", # Ruff - Function is declared `async`, but doesn't `await` or use `async` features. # ignore 282 | "SLF001", 283 | "PT007", 284 | 'S603', 285 | "E501", # pycodestyle line too long, handled by black 286 | "PLW2901", # pylint - for loop variable overwritten by assignment target 287 | "ANN401", 288 | "FBT", 289 | "PLR0913", # too many arguments 290 | "PT", 291 | "TD", 292 | "PERF203", # ignore for now; investigate 293 | "COM812", 294 | "PLR0917", 295 | "CPY001", # copywrite check 296 | "DOC201", # `return` is not documented in docstring 297 | "DOC501", # Raised exception missing from docstring 298 | "DOC502", # Raised exception missing from docstring 299 | "A005", # module shadows builtin 300 | "S608", 301 | ] 302 | lint.select = ["ALL"] 303 | # Allow unused variables when underscore-prefixed. 
304 | lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" 305 | preview = true # preview features & checks, use with caution 306 | src = ["src", "tests/", "docs/"] 307 | target-version = "py39" 308 | 309 | [tool.ruff.lint.pydocstyle] 310 | convention = "google" 311 | 312 | [tool.ruff.lint.isort] 313 | known-first-party = ["pytest_databases", "tests"] 314 | 315 | [tool.ruff.lint.flake8-tidy-imports] 316 | ban-relative-imports = "all" 317 | 318 | [tool.ruff.format] 319 | docstring-code-format = true 320 | docstring-code-line-length = 60 321 | 322 | [tool.ruff.lint.per-file-ignores] 323 | # Allow print/pprint 324 | "__init__.py" = ['F401', 'D104'] 325 | "__main__.py" = ["E402"] 326 | "docs/*" = ["S404", "INP001"] 327 | "examples/*" = ["T201"] 328 | # Tests can use magic values, assertions, and relative imports 329 | "tests/**/*" = [ 330 | "PLR2004", 331 | "S101", 332 | "TID252", 333 | "ERA001", 334 | "ANN201", 335 | "ANN001", 336 | "D103", 337 | "D104", 338 | "T201", 339 | "S404", 340 | "PLR6301", 341 | "FA102", 342 | ] 343 | 344 | 345 | [tool.ruff.lint.mccabe] 346 | max-complexity = 12 347 | 348 | [tool.ruff.lint.pep8-naming] 349 | classmethod-decorators = ["classmethod"] 350 | 351 | # configure in-case someone runs this 352 | [tool.black] 353 | exclude = ''' 354 | /( 355 | \.git 356 | | \.mypy_cache 357 | | \.tox 358 | | venv 359 | | \.venv 360 | | _build 361 | | buck-out 362 | | build 363 | | dist 364 | )/ 365 | ''' 366 | include = '\.pyi?$' 367 | line-length = 120 368 | 369 | ## Testing Tools 370 | 371 | [tool.pytest.ini_options] 372 | addopts = "--doctest-glob='*.md' --dist=loadgroup" 373 | cdist-group-steal = "3:10" 374 | cdist-justify-items = "file" 375 | filterwarnings = [ 376 | "ignore::DeprecationWarning:pkg_resources", 377 | "ignore::DeprecationWarning:xdist.*", 378 | "ignore::DeprecationWarning:importlib._bootstrap", 379 | ] 380 | markers = [ 381 | "mysql: MySQL Tests", 382 | "postgres: Postgres Tests", 383 | "oracle: Oracle Tests", 384 | 
"spanner: Google Cloud Spanner Tests", 385 | "duckdb: DuckDB Tests", 386 | "mssql: Microsoft SQL Server Tests", 387 | "elasticsearch: Elasticsearch Tests", 388 | ] 389 | testpaths = ["tests"] 390 | 391 | [tool.coverage.run] 392 | branch = true 393 | concurrency = ["multiprocessing"] 394 | disable_warnings = ["no-data-collected", "module-not-measured", "module-not-imported"] 395 | omit = [ 396 | "_version.py", # automatically created by hatch-vcs, not in repo 397 | "src/pytest_databases/__metadata__.py", 398 | "tests/*", 399 | "scripts/*", 400 | ] 401 | parallel = true 402 | 403 | [tool.coverage.report] 404 | # Regexes for lines to exclude from consideration 405 | exclude_lines = [ 406 | # Have to re-enable the standard pragma 407 | "pragma: no cover", 408 | 409 | # Don't complain about missing debug-only code: 410 | "def __repr__", 411 | "if self\\.debug", 412 | 413 | # Don't complain if tests don't hit defensive assertion code: 414 | "raise AssertionError", 415 | "raise NotImplementedError", 416 | 417 | # Don't complain if non-runnable code isn't run: 418 | "if 0:", 419 | "if __name__ == .__main__.:", 420 | "if TYPE_CHECKING:", 421 | 'class .*\bProtocol\):', 422 | '@(abc\.)?abstractmethod', 423 | ] 424 | 425 | 426 | [project.entry-points.pytest11] 427 | pytest_databases = "pytest_databases._service" 428 | -------------------------------------------------------------------------------- /scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/litestar-org/pytest-databases/461f8982df55a5214068082ea72170c06513363d/scripts/__init__.py -------------------------------------------------------------------------------- /scripts/build_docs.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | import shutil 5 | import subprocess # noqa: S404 6 | from contextlib import contextmanager 7 | from pathlib 
import Path 8 | from typing import TYPE_CHECKING 9 | 10 | if TYPE_CHECKING: 11 | from collections.abc import Generator 12 | 13 | REDIRECT_TEMPLATE = """ 14 | 15 | 16 | 17 | Page Redirection 18 | 19 | 20 | 21 | 22 | 23 | You are being redirected. If this does not work, click this link 24 | 25 | 26 | """ 27 | 28 | parser = argparse.ArgumentParser() 29 | parser.add_argument("output") 30 | 31 | 32 | @contextmanager 33 | def checkout(branch: str) -> Generator[None, None, None]: 34 | subprocess.run(["git", "checkout", branch], check=True) # noqa: S607 35 | yield 36 | subprocess.run(["git", "checkout", "-"], check=True) # noqa: S607 37 | 38 | 39 | def build(output_dir: str) -> None: 40 | subprocess.run(["make", "docs"], check=True) # noqa: S607 41 | 42 | output_path = Path(output_dir) 43 | output_path.mkdir(parents=True, exist_ok=True) 44 | output_path.joinpath(".nojekyll").touch(exist_ok=True) 45 | output_path.joinpath("index.html").write_text(REDIRECT_TEMPLATE.format(target="latest")) 46 | 47 | docs_src_path = Path("docs/_build/html") 48 | shutil.copytree(docs_src_path, output_path / "latest", dirs_exist_ok=True) 49 | 50 | 51 | def main() -> None: 52 | args = parser.parse_args() 53 | build(output_dir=args.output) 54 | 55 | 56 | if __name__ == "__main__": 57 | main() 58 | -------------------------------------------------------------------------------- /scripts/convert_docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CHANGELOG=docs/changelog.rst 4 | 5 | filename="${CHANGELOG%.*}" 6 | echo "Converting $CHANGELOG to $filename.md" 7 | pandoc --wrap=preserve $CHANGELOG -f rst -t markdown -o "$filename".md 8 | -------------------------------------------------------------------------------- /scripts/install-hatch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # --- Constants --- 4 | BASE_URL="https://github.com/pypa/hatch/releases/latest/download" 5 | 
# --- Handle Optional Installation Directory ---
# Default to ~/.local/bin when no directory is given on the command line.
# (Previously the provided value was overwritten with a default *before* the
# error message printed it, an empty argument made extraction target "",
# and the script both assigned a fallback and exited — contradictory.)
INSTALL_DIR="${1:-$HOME/.local/bin}"
if [[ ! -d "$INSTALL_DIR" ]]; then # Check if directory exists
    echo "Error: Invalid install directory '$INSTALL_DIR'"
    exit 1
fi
INSTALL_DIR=$(realpath "$INSTALL_DIR") # Get absolute path

# --- Determine Platform ---
PLATFORM=$(uname -s)
MACHINE=$(uname -m)
FILE_EXT="tar.gz"

if [[ $PLATFORM == "Darwin" ]]; then
    PLATFORM_NAME="apple-darwin"
elif [[ $PLATFORM == "Linux" ]]; then
    PLATFORM_NAME="unknown-linux-gnu"
elif [[ $PLATFORM == "Windows" ]]; then
    PLATFORM_NAME="pc-windows-msvc"
    FILE_EXT="zip"
else
    echo "Unsupported platform: $PLATFORM"
    exit 1
fi

# --- Construct File Name and URL ---
FILENAME="hatch-$MACHINE-$PLATFORM_NAME.$FILE_EXT"
URL="$BASE_URL/$FILENAME"

# --- Download and Extract ---
echo "Downloading Hatch binary: $FILENAME"
# -f: fail on HTTP errors instead of saving an error page as the archive.
if ! curl -fL -o "$FILENAME" "$URL"; then
    echo "Error: download failed for $URL"
    exit 1
fi

echo "Extracting to '$INSTALL_DIR'..."
# tar and unzip take different destination flags (-C vs -d); the old code
# passed "-C" to unzip, which unzip does not support.
if [[ $FILE_EXT == "zip" ]]; then
    unzip -o "$FILENAME" -d "$INSTALL_DIR"
else
    tar -xzf "$FILENAME" -C "$INSTALL_DIR"
fi
rm "$FILENAME" # Remove archive

HATCH_BINARY="$INSTALL_DIR/hatch" # Path to the extracted binary
if [[ -x "$HATCH_BINARY" ]]; then
    echo "Hatch binary successfully installed at '$HATCH_BINARY'"
else
    echo "Error: Hatch binary not found or not executable."
    exit 1
fi
"""Project metadata resolved from the installed distribution.

Falls back to static placeholder values when the distribution is not
installed (e.g. when running from a bare source checkout).
"""

from __future__ import annotations

from importlib.metadata import PackageNotFoundError, metadata, version

__all__ = ("__project__", "__version__")

try:
    # Resolve live metadata from the installed "pytest-databases" distribution.
    __version__ = version("pytest-databases")
    """Version of the project."""
    __project__ = metadata("pytest-databases")["Name"]
    """Name of the project."""
except PackageNotFoundError:  # pragma: no cover
    # Distribution not installed: use static placeholders instead.
    __version__, __project__ = "0.0.0", "pytest-databases"
finally:
    # Keep the module namespace limited to the public attributes.
    del version, PackageNotFoundError, metadata
annotations

import contextlib
import json
import os
import subprocess  # noqa: S404
import time
from contextlib import AbstractContextManager, contextmanager
from typing import TYPE_CHECKING, Any, Callable

import filelock
import pytest
from docker.errors import APIError, ImageNotFound
from typing_extensions import Self

from docker import DockerClient
from pytest_databases.helpers import get_xdist_worker_id
from pytest_databases.types import ServiceContainer

if TYPE_CHECKING:
    import pathlib
    from collections.abc import Generator
    from types import TracebackType

    from docker.models.containers import Container
    from docker.types import Ulimit


def get_docker_host() -> str:
    """Discover the docker endpoint URL of the currently active context.

    Runs ``docker context ls --format=json`` and returns the endpoint of the
    context marked ``Current``. When the listing is empty (no contexts at
    all), falls back to the rootless podman socket for the current uid.

    Raises:
        subprocess.CalledProcessError: If either subprocess fails.
    """
    result = subprocess.run(
        ["docker", "context", "ls", "--format=json"],  # noqa: S607
        text=True,
        capture_output=True,
        check=True,
    )
    docker_ls = result.stdout.splitlines()
    # if this is empty, we are not in a dockerized environment; It's probably a podman environment on linux
    if not docker_ls or (len(docker_ls) == 1 and docker_ls[0] == "[]"):
        uid_result = subprocess.run(
            ["id", "-u"],  # noqa: S607
            text=True,
            capture_output=True,
            check=True,
        )
        uid = uid_result.stdout.strip()
        return f"unix:///run/user/{uid}/podman/podman.sock"
    # One JSON object per line; pick the endpoint of the active context.
    # NOTE(review): raises StopIteration if no context has Current=true — confirm.
    contexts = (json.loads(line) for line in docker_ls)
    return next(context["DockerEndpoint"] for context in contexts if context["Current"] is True)


def get_docker_client() -> DockerClient:
    """Build a ``DockerClient``, deriving ``DOCKER_HOST`` when it is unset."""
    env = {**os.environ}
    if "DOCKER_HOST" not in env:
        env["DOCKER_HOST"] = get_docker_host()
    return DockerClient.from_env(environment=env)


def _stop_all_containers(client: DockerClient) -> None:
    """Kill/remove every container carrying the ``pytest_databases`` label.

    Raises:
        RuntimeError: If a labelled container is in a state this cleanup
            does not know how to handle.
    """
    containers: list[Container] = client.containers.list(
        all=True,
        filters={"label": "pytest_databases"},
        ignore_removed=True,
    )
    for container in containers:
        if container.status == "running":
            container.kill()
        elif container.status in {"stopped", "dead"}:
            container.remove()
        elif container.status == "removing":
            # Already on its way out; nothing to do.
            continue
        else:
            msg = f"Cannot handle container in state {container.status}"
            raise RuntimeError(msg)


class DockerService(AbstractContextManager):
    """Session-scoped manager that launches and reuses labelled containers.

    Under pytest-xdist, a control file plus ``filelock`` coordinate workers so
    stale containers are cleaned up exactly once and container startup is not
    raced between workers.
    """

    def __init__(
        self,
        client: DockerClient,
        tmp_path: pathlib.Path,
        session: pytest.Session,
    ) -> None:
        self._client = client
        # Shared tmp dir used for inter-worker file locks.
        self._tmp_path = tmp_path
        self._session = session
        # True when running under pytest-xdist (worker id present).
        self._is_xdist = get_xdist_worker_id() is not None

    def __enter__(self) -> Self:
        if self._is_xdist:
            # Only the first worker to create the ctrl file clears leftovers;
            # the lock prevents two workers from doing it concurrently.
            ctrl_file = _get_ctrl_file(self._session)
            with filelock.FileLock(ctrl_file.with_suffix(".lock")):
                if not ctrl_file.exists():
                    ctrl_file.touch()
                    self._stop_all_containers()
        else:
            self._stop_all_containers()
        return self

    def __exit__(
        self,
        /,
        __exc_type: type[BaseException] | None,
        __exc_value: BaseException | None,
        __traceback: TracebackType | None,
    ) -> None:
        # Under xdist, teardown is deferred to pytest_sessionfinish on the
        # controller process instead of each worker.
        if not self._is_xdist:
            self._stop_all_containers()

    def _get_container(self, name: str) -> Container | None:
        """Return the single running container with ``name``, or ``None``.

        Raises:
            ValueError: If more than one matching container is running.
        """
        containers = self._client.containers.list(
            filters={"name": name},
        )
        if len(containers) > 1:
            msg = "More than one running container found"
            raise ValueError(msg)
        if containers:
            return containers[0]
        return None

    def _stop_all_containers(self) -> None:
        _stop_all_containers(self._client)

    @contextmanager
    def run(
        self,
        image: str,
        container_port: int,
        name: str,
        command: str | None = None,
        env: dict[str, Any] | None = None,
        exec_after_start: str | list[str] | None = None,
        check: Callable[[ServiceContainer], bool] | None = None,
        wait_for_log: str | bytes | None = None,
        timeout: int = 10,
        pause: float = 0.1,
        transient: bool = False,
        ulimits: list[Ulimit] | None = None,
        shm_size: int | None = None,
        mem_limit: str | None = None,
        platform: str | None = None,
    ) -> Generator[ServiceContainer, None, None]:
        """Start (or reuse) a labelled container and yield its host/port.

        Args:
            image: Docker image reference; pulled if not present locally.
            container_port: Port inside the container to publish (mapped to a
                random host port).
            name: Logical service name; prefixed with ``pytest_databases_``.
            command: Optional container command override.
            env: Environment variables for the container.
            exec_after_start: Command(s) ``exec``-ed in the container once ready.
            check: Readiness probe called with the yielded ``ServiceContainer``.
            wait_for_log: Readiness marker awaited in the container logs.
            timeout: Seconds allotted for readiness.
            pause: Seconds between readiness polls.
            transient: If True, stop and remove the container on exit.
            ulimits: Ulimit overrides passed to ``containers.run``.
            shm_size: NOTE(review): accepted but apparently not forwarded to
                ``containers.run`` — confirm whether this is intentional.
            mem_limit: Memory limit passed to ``containers.run``.
            platform: Optional platform (e.g. ``linux/x86_64``) for pull/run.

        Raises:
            ValueError: If neither ``check`` nor ``wait_for_log`` is given, if
                the container never publishes a port, or readiness times out.
        """
        if check is None and wait_for_log is None:
            msg = "Must set at least check or wait_for_log"
            raise ValueError(msg)

        platform_kwarg = {}
        if platform is not None:
            platform_kwarg = {"platform": platform}

        name = f"pytest_databases_{name}"
        # Serialize setup across xdist workers so only one worker creates the
        # container; others reuse it once the lock is released.
        lock = filelock.FileLock(self._tmp_path / name) if self._is_xdist else contextlib.nullcontext()
        with lock:
            container = self._get_container(name)
            try:
                self._client.images.get(image)
            except ImageNotFound:
                self._client.images.pull(*image.rsplit(":", maxsplit=1), **platform_kwarg)  # pyright: ignore[reportCallIssue,reportArgumentType]

            if container is None:
                container = self._client.containers.run(  # pyright: ignore[reportCallIssue,reportArgumentType]
                    image,
                    command,
                    detach=True,
                    remove=True,
                    ports={container_port: None},  # pyright: ignore[reportArgumentType]
                    labels=["pytest_databases"],
                    name=name,
                    environment=env,
                    ulimits=ulimits,
                    mem_limit=mem_limit,
                    **platform_kwarg,  # pyright: ignore[reportArgumentType]
                )

            # reload the container; sometimes it can take a while before docker
            # spins it up and the metadata becomes available, so we're redoing the
            # check with a small incremental backoff here
            for i in range(10):
                if any(v for v in container.ports.values()):
                    break
                container.reload()
                time.sleep(0.1 + (i / 10))
            else:
                msg = f"Service {name!r} failed to create container"
                raise ValueError(msg)

            # Resolve the randomly assigned host port for the published
            # container port (port keys look like "<port>/tcp").
            host_port = int(
                container.ports[next(k for k in container.ports if k.startswith(str(container_port)))][0]["HostPort"]
            )
            service = ServiceContainer(
                host="127.0.0.1",
                port=host_port,
            )

            started = time.time()
            if wait_for_log:
                if isinstance(wait_for_log, str):
                    wait_for_log = wait_for_log.encode()
                while time.time() - started < timeout:
                    if wait_for_log in container.logs():
                        break
                    time.sleep(pause)
                else:
                    msg = f"Service {name!r} failed to come online"
                    raise ValueError(msg)

            if check:
                while time.time() - started < timeout:
                    if check(service) is True:
                        break
                    time.sleep(pause)
                else:
                    msg = f"Service {name!r} failed to come online"
                    raise ValueError(msg)

            if exec_after_start:
                container.exec_run(exec_after_start)

        yield service

        if transient:
            try:
                container.stop()
                container.remove(force=True)
            except APIError as exc:  # pyright: ignore[reportAttributeAccessIssue]
                # '409 - Conflict' means removal is already in progress. this is the
                # safest way of dealing with it, since the API is a bit borked when it
                # comes to concurrent requests
                if exc.status_code not in {409, 404}:
                    raise


@pytest.fixture(scope="session")
def docker_client() -> Generator[DockerClient, None, None]:
    """Session-scoped docker client, closed when the session ends."""
    client = get_docker_client()
    try:
        yield client
    finally:
        client.close()


@pytest.fixture(scope="session")
def docker_service(
    docker_client: DockerClient,
    tmp_path_factory: pytest.TempPathFactory,
    request: pytest.FixtureRequest,
) -> Generator[DockerService, None, None]:
    """Session-scoped :class:`DockerService` used by the database fixtures."""
    tmp_path = _get_base_tmp_path(tmp_path_factory)
    with DockerService(
        client=docker_client,
        tmp_path=tmp_path,
        session=request.session,
    ) as service:
        yield service


def _get_base_tmp_path(tmp_path_factory: pytest.TempPathFactory) -> pathlib.Path:
    """Return a tmp dir shared by all xdist workers (their common parent)."""
    tmp_path = tmp_path_factory.getbasetemp()
    if get_xdist_worker_id() is not None:
        # Worker basetemps are subdirs of the controller's; use the parent so
        # every worker sees the same lock/ctrl files.
        tmp_path = tmp_path.parent
    return tmp_path


def _get_ctrl_file(session: pytest.Session) -> pathlib.Path:
    """Path of the control file marking that stale containers were cleared."""
    tmp_path = _get_base_tmp_path(session.config._tmp_path_factory)  # type: ignore[attr-defined]
    return tmp_path / "ctrl"


@pytest.hookimpl(wrapper=True)
def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> Generator[Any, Any, Any]:
    """Tear down labelled containers once the whole test session is over."""
    try:
        return (yield)
    finally:
        if not hasattr(session.config, "workerinput") and _get_ctrl_file(session).exists():
            # if we're running on xdist, delete the ctrl file, telling the daemon proc
            # to stop all running containers.
            # when not running on xdist, containers are stopped by the service itself
            _stop_all_containers(get_docker_client())
from __future__ import annotations

import os
import re
import subprocess  # noqa: S404
import time
import timeit
from contextlib import AbstractContextManager
from typing import TYPE_CHECKING, Any, Callable

from pytest_databases.helpers import simple_string_hash

if TYPE_CHECKING:
    from collections.abc import Awaitable, Generator, Iterable
    from pathlib import Path
    from types import TracebackType

# Strings accepted as "truthy" when reading boolean environment variables.
TRUE_VALUES = {"True", "true", "1", "yes", "y", "Y", "T", "on", "enabled", "ok"}


def wait_until_responsive(
    check: Callable[..., bool],
    timeout: float,
    pause: float,
    **kwargs: Any,
) -> None:
    """Wait until a service is responsive.

    Args:
        check: Callable; waiting stops when it returns a truthy value.
        timeout: Maximum seconds to wait.
        pause: Seconds to wait between calls to `check`.
        **kwargs: Given as kwargs to `check`.

    Raises:
        RuntimeError: If `check` never returns truthy within `timeout`.
    """
    ref = timeit.default_timer()
    now = ref
    while (now - ref) < timeout:  # sourcery skip
        if check(**kwargs):
            return
        time.sleep(pause)
        now = timeit.default_timer()

    msg = "Timeout reached while waiting on service!"
    raise RuntimeError(msg)


# Skip launching compose stacks entirely (services assumed already running).
SKIP_DOCKER_COMPOSE: bool = os.environ.get("SKIP_DOCKER_COMPOSE", "False") in TRUE_VALUES
# Use the standalone "docker-compose" binary instead of "docker compose".
USE_LEGACY_DOCKER_COMPOSE: bool = os.environ.get("USE_LEGACY_DOCKER_COMPOSE", "False") in TRUE_VALUES
# Project name derived from a hash of this file's path, to isolate stacks.
COMPOSE_PROJECT_NAME: str = f"pytest-databases-{simple_string_hash(__file__)}"


class DockerServiceRegistry(AbstractContextManager):
    """Manages docker-compose services for a single (xdist) worker.

    Each worker gets its own compose project name (suffix ``-<worker_id>``)
    so stacks do not collide; exiting the context tears the stack down.
    """

    def __init__(
        self,
        worker_id: str,
        compose_project_name: str = COMPOSE_PROJECT_NAME,
        before_start: Iterable[Callable[[], Any]] | None = None,
    ) -> None:
        # Names of services already brought up (or assumed up when skipping).
        self._running_services: set[str] = set()
        self.docker_ip = self._get_docker_ip()
        self._base_command = ["docker-compose"] if USE_LEGACY_DOCKER_COMPOSE else ["docker", "compose"]
        self._compose_files: list[str] = []
        self._base_command.extend(
            [
                f"--project-name={compose_project_name}-{worker_id}",
            ],
        )
        # Callbacks invoked before every `start()` call.
        self._before_start = list(before_start) if before_start else []

    def __exit__(
        self,
        /,
        __exc_type: type[BaseException] | None,
        __exc_value: BaseException | None,
        __traceback: TracebackType | None,
    ) -> None:
        self.down()

    @staticmethod
    def _get_docker_ip() -> str:
        """Resolve the IP to reach containers on, from ``DOCKER_HOST``.

        Returns ``127.0.0.1`` for unset or unix-socket hosts; extracts the
        host from a ``tcp://host:port`` value otherwise.

        Raises:
            ValueError: If ``DOCKER_HOST`` has an unrecognized scheme.
        """
        docker_host = os.environ.get("DOCKER_HOST", "").strip()
        if not docker_host or docker_host.startswith("unix://"):
            return "127.0.0.1"

        if match := re.match(r"^tcp://(.+?):\d+$", docker_host):
            return match[1]

        msg = f'Invalid value for DOCKER_HOST: "{docker_host}".'
        raise ValueError(msg)

    def run_command(self, *args: str) -> None:
        """Run a compose subcommand with the project/file arguments prepended."""
        command = [*self._base_command, *self._compose_files, *args]
        subprocess.run(command, check=True, capture_output=True)

    def start(
        self,
        name: str,
        docker_compose_files: list[Path],
        *,
        check: Callable[..., bool],
        timeout: float = 30,
        pause: float = 0.1,
        **kwargs: Any,
    ) -> None:
        """Bring up compose service ``name`` and wait until it is responsive.

        Args:
            name: Compose service name to start.
            docker_compose_files: Compose files describing the service.
            check: Readiness probe; receives ``host=<docker_ip>`` plus kwargs.
            timeout: Seconds to wait for readiness.
            pause: Seconds between readiness polls.
            **kwargs: Extra kwargs forwarded to ``check``.
        """
        for before_start in self._before_start:
            before_start()

        if SKIP_DOCKER_COMPOSE:
            # Mark the service as running so the block below is skipped and
            # only the readiness check is performed against the existing stack.
            self._running_services.add(name)
        if name not in self._running_services:
            self._compose_files = [f"--file={compose_file}" for compose_file in docker_compose_files]
            self.run_command("up", "--force-recreate", "-d", name)
            self._running_services.add(name)

        wait_until_responsive(
            check=check,
            timeout=timeout,
            pause=pause,
            host=self.docker_ip,
            **kwargs,
        )

    def stop(self, name: str) -> None:
        """Tear down a single service (with volumes, 10s grace period)."""
        self.run_command("down", "--volumes", "-t", "10", name)

    def down(self) -> None:
        """Tear down the whole stack unless compose usage is skipped."""
        if not SKIP_DOCKER_COMPOSE:
            self.run_command("down", "-t", "10", "--volumes")
# Azurite's publicly documented development-storage credentials (not secrets).
DEFAULT_ACCOUNT_KEY = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw=="
DEFAULT_ACCOUNT_NAME = "devstoreaccount1"


@dataclass
class AzureBlobService(ServiceContainer):
    """Connection details for a running Azurite blob endpoint."""

    connection_string: str
    account_url: str
    account_key: str
    account_name: str


@pytest.fixture(scope="session")
def azure_blob_xdist_isolation_level() -> XdistIsolationLevel:
    """How xdist workers are isolated: per-"database" (storage account) by default."""
    return "database"


@pytest.fixture(scope="session")
def azurite_in_memory() -> bool:
    """Whether Azurite runs with in-memory persistence (no disk state)."""
    return True


def _create_account_options(number: int) -> list[tuple[str, str]]:
    """Build ``number`` (account_name, account_key) pairs, one per worker."""
    return [(f"test_account_{i}", DEFAULT_ACCOUNT_KEY) for i in range(number)]


@pytest.fixture(scope="session")
def azure_blob_service(
    docker_service: DockerService,
    azurite_in_memory: bool,
    azure_blob_xdist_isolation_level: XdistIsolationLevel,
) -> Generator[ServiceContainer, None, None]:
    """Start an Azurite container and yield its connection details.

    Under xdist, "server" isolation starts one container per worker, while
    the default isolation shares one container but gives each worker its own
    storage account via the ``AZURITE_ACCOUNTS`` environment variable.
    """
    command = "azurite-blob --blobHost 0.0.0.0 --blobPort 10000"
    if azurite_in_memory:
        command += " --inMemoryPersistence"

    name = "azurite-blob"
    env = {}
    account_name = DEFAULT_ACCOUNT_NAME
    account_key = DEFAULT_ACCOUNT_KEY

    worker_num = get_xdist_worker_num()

    if worker_num is not None:
        if azure_blob_xdist_isolation_level == "server":
            # One container per worker.
            name = f"{name}_{worker_num}"
        else:
            # One shared container; per-worker storage account.
            accounts = _create_account_options(get_xdist_worker_count())
            env["AZURITE_ACCOUNTS"] = ";".join(f"{name}:{key}" for name, key in accounts)
            account_name, account_key = accounts[worker_num]

    with docker_service.run(
        image="mcr.microsoft.com/azure-storage/azurite",
        name=name,
        command=command,
        wait_for_log="Azurite Blob service successfully listens on",
        container_port=10000,
        env=env,
    ) as service:
        # Azurite addresses accounts by path: http://host:port/<account>.
        account_url = f"http://127.0.0.1:{service.port}/{account_name}"
        connection_string = (
            "DefaultEndpointsProtocol=http;"
            f"AccountName={account_name};"
            f"AccountKey={account_key};"
            f"BlobEndpoint={account_url};"
        )

        yield AzureBlobService(
            host=service.host,
            port=service.port,
            connection_string=connection_string,
            account_url=account_url,
            account_key=account_key,
            account_name=account_name,
        )


@pytest.fixture(scope="session")
def azure_blob_default_container_name() -> str:
    """Default blob container name used by the client fixtures."""
    return "pytest-databases"


@pytest.fixture(scope="session")
def azure_blob_container_client(
    azure_blob_service: AzureBlobService,
    azure_blob_default_container_name: str,
) -> Generator[ContainerClient, None, None]:
    """Synchronous ``ContainerClient`` bound to the Azurite service."""
    with ContainerClient.from_connection_string(
        azure_blob_service.connection_string,
        container_name=azure_blob_default_container_name,
    ) as container_client:
        yield container_client


@pytest.fixture(scope="session")
async def azure_blob_async_container_client(
    azure_blob_service: AzureBlobService,
    azure_blob_default_container_name: str,
) -> AsyncGenerator[AsyncContainerClient, None]:
    """Asynchronous ``ContainerClient`` bound to the Azurite service."""
    async with AsyncContainerClient.from_connection_string(
        azure_blob_service.connection_string,
        container_name=azure_blob_default_container_name,
    ) as container_client:
        yield container_client
from pytest_databases.types import ServiceContainer, XdistIsolationLevel

if TYPE_CHECKING:
    from collections.abc import Generator

    from pytest_databases._service import DockerService


@pytest.fixture(scope="session")
def xdist_bigquery_isolation_level() -> XdistIsolationLevel:
    """How xdist workers are isolated for BigQuery; "database" by default."""
    return "database"


@pytest.fixture(scope="session")
def bigquery_image() -> str:
    """Docker image for the BigQuery emulator."""
    return "ghcr.io/goccy/bigquery-emulator:latest"


@pytest.fixture(scope="session")
def platform() -> str:
    """Container platform; the emulator image is published for x86_64 only here."""
    return "linux/x86_64"


@dataclass
class BigQueryService(ServiceContainer):
    """Connection details for a running BigQuery emulator."""

    project: str
    dataset: str
    credentials: Credentials

    @property
    def endpoint(self) -> str:
        """HTTP endpoint URL of the emulator."""
        return f"http://{self.host}:{self.port}"

    @property
    def client_options(self) -> ClientOptions:
        """``ClientOptions`` pointing a BigQuery client at the emulator."""
        return ClientOptions(api_endpoint=self.endpoint)


@pytest.fixture(scope="session")
def bigquery_service(
    docker_service: DockerService,
    xdist_bigquery_isolation_level: XdistIsolationLevel,
    bigquery_image: str,
    platform: str,
) -> Generator[BigQueryService, None, None]:
    """Start a BigQuery emulator container and yield its connection details.

    Under xdist with "server" isolation, each worker gets its own (transient)
    container; otherwise a single shared container is used.
    """
    project = "emulator-test-project"
    dataset = "test-dataset"
    container_name = "bigquery"

    worker_num = get_xdist_worker_num()
    if worker_num is not None:
        container_name += f"_{worker_num}"

    def check(_service: ServiceContainer) -> bool:
        # Readiness probe: a trivial query must round-trip successfully.
        try:
            client = bigquery.Client(
                project=project,
                client_options=ClientOptions(api_endpoint=f"http://{_service.host}:{_service.port}"),
                credentials=AnonymousCredentials(),
            )

            job = client.query(query="SELECT 1 as one")

            resp = list(job.result())
            return resp[0].one == 1
        except Exception:  # noqa: BLE001
            return False

    with docker_service.run(
        image=bigquery_image,
        command=f"--project={project} --dataset={dataset}",
        name=container_name,
        check=check,
        env={
            "PROJECT_ID": project,
            "DATASET_NAME": dataset,
        },
        container_port=9050,
        # The emulator can be slow to boot; allow a longer readiness window.
        timeout=60,
        transient=xdist_bigquery_isolation_level == "server",
        platform=platform,
    ) as service:
        yield BigQueryService(
            host=service.host,
            port=service.port,
            project=project,
            dataset=dataset,
            credentials=AnonymousCredentials(),
        )


@pytest.fixture(scope="session")
def bigquery_client(bigquery_service: BigQueryService) -> Generator[bigquery.Client, None, None]:
    """BigQuery client configured against the emulator service."""
    yield bigquery.Client(
        project=bigquery_service.project,
        client_options=bigquery_service.client_options,
        credentials=bigquery_service.credentials,
    )
def cockroachdb_service(
    docker_service: DockerService,
    xdist_cockroachdb_isolation_level: XdistIsolationLevel,
    cockroachdb_driver_opts: dict[str, str],
    cockroachdb_image: str,
) -> Generator[CockroachDBService, None, None]:
    """Start a single-node CockroachDB container and yield its connection info.

    Under xdist, ``server`` isolation gets one container per worker while
    ``database`` isolation shares one container and creates one database
    per worker.
    """

    def _is_ready(_service: ServiceContainer) -> bool:
        # Probe the built-in "defaultdb" with a trivial query.
        query_params = "&".join(f"{k}={v}" for k, v in cockroachdb_driver_opts.items()) if cockroachdb_driver_opts else ""
        try:
            conn = psycopg.connect(f"postgresql://root@{_service.host}:{_service.port}/defaultdb?{query_params}")
        except Exception:  # noqa: BLE001
            return False
        try:
            row = conn.execute("SELECT 1").fetchone()
            return bool(row is not None and row[0] == 1)
        finally:
            conn.close()

    container_name = "cockroachdb"
    db_name = "pytest_databases"
    worker_num = get_xdist_worker_num()
    if worker_num is not None:
        if xdist_cockroachdb_isolation_level == "server":
            container_name = f"cockroachdb_{worker_num}"
        else:
            db_name = f"pytest_databases_{worker_num}"

    with docker_service.run(
        image=cockroachdb_image,
        container_port=26257,
        check=_is_ready,
        name=container_name,
        command="start-single-node --insecure",
        exec_after_start=f'cockroach sql --insecure -e "CREATE DATABASE {db_name}";',
        transient=xdist_cockroachdb_isolation_level == "server",
    ) as service:
        yield CockroachDBService(
            host=service.host,
            port=service.port,
            database=db_name,
            driver_opts=cockroachdb_driver_opts,
        )
f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}" 94 | ) as conn: 95 | yield conn 96 | -------------------------------------------------------------------------------- /src/pytest_databases/docker/elastic_search.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import dataclasses 5 | import traceback 6 | from typing import TYPE_CHECKING 7 | 8 | import pytest 9 | from elasticsearch7 import Elasticsearch as Elasticsearch7 10 | from elasticsearch7 import Elasticsearch as Elasticsearch8 11 | 12 | from pytest_databases.types import ServiceContainer 13 | 14 | if TYPE_CHECKING: 15 | from collections.abc import Generator 16 | 17 | from pytest_databases._service import DockerService 18 | 19 | 20 | @dataclasses.dataclass 21 | class ElasticsearchService(ServiceContainer): 22 | scheme: str 23 | user: str 24 | password: str 25 | database: str 26 | 27 | 28 | def elasticsearch7_responsive(scheme: str, host: str, port: int, user: str, password: str, database: str) -> bool: 29 | try: 30 | with Elasticsearch7( 31 | hosts=[{"host": host, "port": port, "scheme": scheme}], verify_certs=False, http_auth=(user, password) 32 | ) as client: 33 | return client.ping() 34 | except Exception: # noqa: BLE001 35 | return False 36 | 37 | 38 | def elasticsearch8_responsive(scheme: str, host: str, port: int, user: str, password: str, database: str) -> bool: 39 | try: 40 | with Elasticsearch8( 41 | hosts=[{"host": host, "port": port, "scheme": scheme}], verify_certs=False, basic_auth=(user, password) 42 | ) as client: 43 | return client.ping() 44 | except Exception: # noqa: BLE001 45 | traceback.print_exc() 46 | return False 47 | 48 | 49 | @pytest.fixture(scope="session") 50 | def elasticsearch_service_memory_limit() -> str: 51 | return "500m" 52 | 53 | 54 | @contextlib.contextmanager 55 | def _provide_elasticsearch_service( 56 | 
docker_service: DockerService, 57 | image: str, 58 | name: str, 59 | client_cls: type[Elasticsearch7 | Elasticsearch8], 60 | memory_limit: str, 61 | ) -> Generator[ElasticsearchService, None, None]: 62 | user = "elastic" 63 | password = "changeme" 64 | database = "db" 65 | scheme = "http" 66 | 67 | def check(_service: ServiceContainer) -> bool: 68 | try: 69 | with client_cls( 70 | hosts=[{"host": _service.host, "port": _service.port, "scheme": scheme}], 71 | verify_certs=False, 72 | http_auth=(user, password), 73 | ) as client: 74 | return client.ping() 75 | except Exception: # noqa: BLE001 76 | return False 77 | 78 | with docker_service.run( 79 | image=image, 80 | name=name, 81 | container_port=9200, 82 | env={ 83 | "discovery.type": "single-node", 84 | "xpack.security.enabled": "false", 85 | }, 86 | check=check, 87 | timeout=120, 88 | pause=1, 89 | transient=True, 90 | mem_limit="1g", 91 | ) as service: 92 | yield ElasticsearchService( 93 | host=service.host, 94 | port=service.port, 95 | user=user, 96 | password=password, 97 | scheme=scheme, 98 | database=database, 99 | ) 100 | 101 | 102 | @pytest.fixture(autouse=False, scope="session") 103 | def elasticsearch_7_service( 104 | docker_service: DockerService, 105 | elasticsearch_service_memory_limit: str, 106 | ) -> Generator[ElasticsearchService, None, None]: 107 | with _provide_elasticsearch_service( 108 | docker_service=docker_service, 109 | image="elasticsearch:7.17.19", 110 | name="elasticsearch-7", 111 | client_cls=Elasticsearch7, 112 | memory_limit=elasticsearch_service_memory_limit, 113 | ) as service: 114 | yield service 115 | 116 | 117 | @pytest.fixture(autouse=False, scope="session") 118 | def elasticsearch_8_service( 119 | docker_service: DockerService, 120 | elasticsearch_service_memory_limit: str, 121 | ) -> Generator[ElasticsearchService, None, None]: 122 | with _provide_elasticsearch_service( 123 | docker_service=docker_service, 124 | image="elasticsearch:8.13.0", 125 | name="elasticsearch-8", 126 | 
client_cls=Elasticsearch8, 127 | memory_limit=elasticsearch_service_memory_limit, 128 | ) as service: 129 | yield service 130 | 131 | 132 | @pytest.fixture(autouse=False, scope="session") 133 | def elasticsearch_service(elasticsearch8_service: ElasticsearchService) -> ElasticsearchService: 134 | return elasticsearch8_service 135 | -------------------------------------------------------------------------------- /src/pytest_databases/docker/mariadb.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import traceback 5 | from collections.abc import Generator 6 | from dataclasses import dataclass 7 | from typing import TYPE_CHECKING 8 | 9 | import mariadb 10 | import pytest 11 | 12 | from pytest_databases.helpers import get_xdist_worker_num 13 | from pytest_databases.types import ServiceContainer, XdistIsolationLevel 14 | 15 | if TYPE_CHECKING: 16 | from collections.abc import Generator 17 | 18 | from pytest_databases._service import DockerService 19 | 20 | 21 | @dataclass 22 | class MariaDBService(ServiceContainer): 23 | db: str 24 | user: str 25 | password: str 26 | 27 | 28 | @pytest.fixture(scope="session") 29 | def xdist_mariadb_isolation_level() -> XdistIsolationLevel: 30 | return "database" 31 | 32 | 33 | @contextlib.contextmanager 34 | def _provide_mysql_service( 35 | docker_service: DockerService, 36 | image: str, 37 | name: str, 38 | isolation_level: XdistIsolationLevel, 39 | ) -> Generator[MariaDBService, None, None]: 40 | user = "app" 41 | password = "super-secret" 42 | root_password = "super-secret" 43 | database = "db" 44 | 45 | def check(_service: ServiceContainer) -> bool: 46 | try: 47 | conn = mariadb.connect( 48 | host=_service.host, 49 | port=_service.port, 50 | user=user, 51 | database=database, 52 | password=password, 53 | ) 54 | except Exception: # noqa: BLE001 55 | traceback.print_exc() 56 | return False 57 | 58 | try: 59 | with conn.cursor() as 
@pytest.fixture(autouse=False, scope="session")
def mariadb_113_service(
    docker_service: DockerService,
    xdist_mariadb_isolation_level: XdistIsolationLevel,
) -> Generator[MariaDBService, None, None]:
    """Session-scoped MariaDB 11.3 service container."""
    with _provide_mysql_service(
        docker_service=docker_service,
        isolation_level=xdist_mariadb_isolation_level,
        image="mariadb:11.3",
        name="mariadb-11.3",
    ) as mariadb_info:
        yield mariadb_info
@pytest.fixture(scope="session")
def minio_secure() -> bool:
    """Whether clients should use TLS, driven by the MINIO_SECURE env var."""
    flag = os.environ.get("MINIO_SECURE", "false")
    return flag.lower() in TRUE_VALUES
@pytest.fixture(scope="session")
def minio_service(
    docker_service: "DockerService",
    minio_access_key: str,
    minio_secret_key: str,
    minio_secure: bool,
) -> "Generator[MinioService, None, None]":
    """Start a MinIO container and yield its connection details."""

    def check(_service: ServiceContainer) -> bool:
        # Probe MinIO's readiness endpoint over plain HTTP(S).
        scheme = "https" if minio_secure else "http"
        url = f"{scheme}://{_service.host}:{_service.port}/minio/health/ready"
        if not url.startswith(("http:", "https:")):
            msg = "URL must start with 'http:' or 'https:'"
            raise ValueError(msg)
        try:
            with urlopen(url=Request(url, method="GET"), timeout=10) as response:  # noqa: S310
                return response.status == 200
        except (URLError, ConnectionError):
            return False

    with docker_service.run(
        image="quay.io/minio/minio",
        name="minio",
        command="server /data",
        container_port=9000,
        timeout=20,
        pause=0.5,
        env={
            "MINIO_ROOT_USER": minio_access_key,
            "MINIO_ROOT_PASSWORD": minio_secret_key,
        },
        check=check,
    ) as service:
        yield MinioService(
            host=service.host,
            port=service.port,
            endpoint=f"{service.host}:{service.port}",
            access_key=minio_access_key,
            secret_key=minio_secret_key,
            secure=minio_secure,
        )
@dataclasses.dataclass
class MSSQLService(ServiceContainer):
    """Connection details for a running SQL Server container."""

    user: str
    password: str
    database: str

    @property
    def connection_string(self) -> str:
        """ODBC-style connection string for this service (Driver 18, encryption off)."""
        parts = (
            "encrypt=no",
            "TrustServerCertificate=yes",
            "driver={ODBC Driver 18 for SQL Server}",
            f"server={self.host},{self.port}",
            f"database={self.database}",
            f"UID={self.user}",
            f"PWD={self.password}",
        )
        return "; ".join(parts)
@pytest.fixture(autouse=False, scope="session")
def mssql_connection(mssql_service: MSSQLService) -> Generator[pymssql.Connection, None, None]:
    """Session-scoped pymssql connection to the per-session test database."""
    connect_args = {
        "host": mssql_service.host,
        "port": str(mssql_service.port),
        "database": mssql_service.database,
        "user": mssql_service.user,
        "password": mssql_service.password,
        "timeout": 2,
    }
    with pymssql.connect(**connect_args) as db_connection:
        yield db_connection
@dataclass
class MySQLService(ServiceContainer):
    """Connection details for a running MySQL container.

    Extends ``ServiceContainer`` (which carries ``host``/``port``) with the
    database name and credentials created for the test session.
    """

    # Database created for this session; may carry an xdist worker suffix
    # such as "pytest_databases_0" under "database" isolation.
    db: str
    user: str
    password: str
@pytest.fixture(scope="session")
def mysql_56_service(
    docker_service: DockerService,
    xdist_mysql_isolation_level: XdistIsolationLevel,
    platform: str,
) -> Generator[MySQLService, None, None]:
    """Session-scoped MySQL 5.6 service container."""
    with _provide_mysql_service(
        docker_service=docker_service,
        isolation_level=xdist_mysql_isolation_level,
        platform=platform,
        image="mysql:5.6",
        name="mysql-56",
    ) as mysql_info:
        yield mysql_info
@pytest.fixture(scope="session")
def mysql_57_connection(mysql_57_service: MySQLService) -> Generator[MySQLConnectionAbstract, None, None]:
    """Session-scoped connection to the MySQL 5.7 service."""
    connect_args = {
        "host": mysql_57_service.host,
        "port": mysql_57_service.port,
        "user": mysql_57_service.user,
        "database": mysql_57_service.db,
        "password": mysql_57_service.password,
    }
    with mysql.connector.connect(**connect_args) as conn:
        yield conn  # type: ignore
def oracle_responsive(host: str, port: int, service_name: str, user: str, password: str) -> bool:
    """Return True when an Oracle instance accepts connections and answers a trivial query.

    Any failure (connection refused, auth error, query error) is treated as
    "not responsive" and returns False.
    """
    try:
        # BUG FIX: the connection was previously never closed, leaking a
        # connection on every successful probe. Both the connection and the
        # cursor are context managers in python-oracledb.
        with oracledb.connect(
            host=host,
            port=port,
            user=user,
            service_name=service_name,
            password=password,
        ) as conn, conn.cursor() as cursor:
            cursor.execute("SELECT 1 FROM dual")
            resp = cursor.fetchone()
            return resp[0] == 1 if resp is not None else False
    except Exception:  # noqa: BLE001
        return False
@pytest.fixture(autouse=False, scope="session")
def oracle_23ai_service(
    docker_service: DockerService, oracle_23ai_image: str, oracle_23ai_service_name: str
) -> Generator[OracleService, None, None]:
    """Session-scoped Oracle 23ai (free) service container."""
    with _provide_oracle_service(
        docker_service=docker_service,
        image=oracle_23ai_image,
        name="oracle23ai",
        service_name=oracle_23ai_service_name,
    ) as oracle_info:
        yield oracle_info
@pytest.fixture(autouse=False, scope="session")
def oracle_startup_connection(oracle_23ai_connection: oracledb.Connection) -> oracledb.Connection:
    """Alias for the connection to the latest supported Oracle version.

    BUG FIX: this fixture previously requested ``oracle_23ai_startup_connection``,
    which is not defined anywhere in this module, so requesting
    ``oracle_startup_connection`` failed with a pytest fixture-lookup error.
    NOTE(review): aliasing to ``oracle_23ai_connection`` assumes no separate
    "startup" connection fixture exists elsewhere in the plugin — confirm.
    """
    return oracle_23ai_connection
def redis_responsive(service_container: ServiceContainer) -> bool:
    """Return True when the Redis-compatible server answers PING."""
    client = Redis(host=service_container.host, port=service_container.port)
    try:
        alive = client.ping()
    except (ConnectionError, RedisConnectionError):
        alive = False
    finally:
        client.close()
    return alive
@pytest.fixture(autouse=False, scope="session")
def dragonfly_service(
    docker_service: DockerService,
    dragonfly_image: str,
    xdist_redis_isolation_level: XdistIsolationLevel,
) -> Generator[RedisService, None, None]:
    """Session-scoped DragonflyDB container (Redis-compatible).

    Under xdist, "database" isolation shares a container and assigns each
    worker its own numbered DB; "server" isolation starts one container
    per worker.
    """
    worker_num = get_xdist_worker_num()
    name = "dragonfly"
    db = 0
    if worker_num is not None:
        if xdist_redis_isolation_level == "database":
            # NOTE(review): "// 1" maps every worker to the same container;
            # it looks like a placeholder for a real workers-per-container
            # divisor — confirm intent before changing.
            container_num = worker_num // 1
            name = f"dragonfly_{container_num + 1}"
            db = worker_num
        else:
            name = f"dragonfly_{worker_num + 1}"

    with docker_service.run(
        dragonfly_image,
        check=redis_responsive,
        container_port=6379,
        name=name,
        transient=xdist_redis_isolation_level == "server",
    ) as service:
        yield RedisService(host=service.host, port=service.port, db=db)
== "database": 139 | container_num = worker_num // 1 140 | name += f"_{container_num + 1}" 141 | db = worker_num 142 | else: 143 | name += f"_{worker_num + 1}" 144 | 145 | with docker_service.run( 146 | keydb_image, 147 | check=redis_responsive, 148 | container_port=6379, 149 | name=name, 150 | transient=xdist_redis_isolation_level == "server", 151 | ) as service: 152 | yield RedisService(host=service.host, port=service.port, db=db) 153 | 154 | 155 | @pytest.fixture(autouse=False, scope="session") 156 | def keydb_port(keydb_service: RedisService) -> int: 157 | return keydb_service.port 158 | 159 | 160 | @pytest.fixture(autouse=False, scope="session") 161 | def keydb_host(keydb_service: RedisService) -> str: 162 | return keydb_service.host 163 | -------------------------------------------------------------------------------- /src/pytest_databases/docker/spanner.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass 4 | from typing import TYPE_CHECKING 5 | 6 | import pytest 7 | from google.api_core.client_options import ClientOptions 8 | from google.auth.credentials import AnonymousCredentials, Credentials 9 | from google.cloud import spanner 10 | 11 | from pytest_databases.helpers import get_xdist_worker_num 12 | from pytest_databases.types import ServiceContainer 13 | 14 | if TYPE_CHECKING: 15 | from collections.abc import Generator 16 | 17 | from pytest_databases._service import DockerService 18 | 19 | 20 | @pytest.fixture(scope="session") 21 | def spanner_image() -> str: 22 | return "gcr.io/cloud-spanner-emulator/emulator:latest" 23 | 24 | 25 | @dataclass 26 | class SpannerService(ServiceContainer): 27 | credentials: Credentials 28 | project: str 29 | database_name: str 30 | instance_name: str 31 | 32 | @property 33 | def endpoint(self) -> str: 34 | return f"{self.host}:{self.port}" 35 | 36 | @property 37 | def client_options(self) -> ClientOptions: 38 | 
return ClientOptions(api_endpoint=self.endpoint) 39 | 40 | 41 | @pytest.fixture(autouse=False, scope="session") 42 | def spanner_service(docker_service: DockerService, spanner_image: str) -> Generator[SpannerService, None, None]: 43 | with docker_service.run( 44 | image=spanner_image, 45 | name=f"pytest_databases_spanner_{get_xdist_worker_num() or 0}", 46 | container_port=9010, 47 | wait_for_log="gRPC server listening at", 48 | transient=True, 49 | ) as service: 50 | yield SpannerService( 51 | host=service.host, 52 | port=service.port, 53 | credentials=AnonymousCredentials(), 54 | project="emulator-test-project", 55 | instance_name="emulator-test-instance", 56 | database_name="emulator-test-database", 57 | ) 58 | 59 | 60 | @pytest.fixture(autouse=False, scope="session") 61 | def spanner_connection( 62 | spanner_service: SpannerService, 63 | ) -> Generator[spanner.Client, None, None]: 64 | client = spanner.Client( 65 | project=spanner_service.project, 66 | credentials=spanner_service.credentials, 67 | client_options=spanner_service.client_options, 68 | ) 69 | try: 70 | yield client 71 | finally: 72 | client.close() 73 | -------------------------------------------------------------------------------- /src/pytest_databases/docker/valkey.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import dataclasses 4 | from typing import TYPE_CHECKING, cast 5 | 6 | import pytest 7 | from valkey import Valkey 8 | from valkey.exceptions import ConnectionError as ValkeyConnectionError 9 | 10 | from pytest_databases.helpers import get_xdist_worker_num 11 | from pytest_databases.types import ServiceContainer, XdistIsolationLevel 12 | 13 | if TYPE_CHECKING: 14 | from collections.abc import Generator 15 | 16 | from pytest_databases._service import DockerService 17 | 18 | 19 | @dataclasses.dataclass 20 | class ValkeyService(ServiceContainer): 21 | db: int 22 | 23 | 24 | @pytest.fixture(scope="session") 25 
| def xdist_valkey_isolation_level() -> XdistIsolationLevel: 26 | return "database" 27 | 28 | 29 | def valkey_responsive(service_container: ServiceContainer) -> bool: 30 | client = Valkey(host=service_container.host, port=service_container.port) 31 | try: 32 | return cast("bool", client.ping()) 33 | except (ConnectionError, ValkeyConnectionError): 34 | return False 35 | finally: 36 | client.close() 37 | 38 | 39 | @pytest.fixture(autouse=False, scope="session") 40 | def valkey_port(valkey_service: ValkeyService) -> int: 41 | return valkey_service.port 42 | 43 | 44 | @pytest.fixture(autouse=False, scope="session") 45 | def valkey_host(valkey_service: ValkeyService) -> str: 46 | return valkey_service.host 47 | 48 | 49 | @pytest.fixture(autouse=False, scope="session") 50 | def valkey_image() -> str: 51 | return "valkey/valkey:latest" 52 | 53 | 54 | @pytest.fixture(autouse=False, scope="session") 55 | def valkey_service( 56 | docker_service: DockerService, 57 | valkey_image: str, 58 | xdist_valkey_isolation_level: XdistIsolationLevel, 59 | ) -> Generator[ValkeyService, None, None]: 60 | worker_num = get_xdist_worker_num() 61 | name = "valkey" 62 | db = 0 63 | if worker_num is not None: 64 | if xdist_valkey_isolation_level == "database": 65 | container_num = worker_num // 1 66 | name += f"_{container_num + 1}" 67 | db = worker_num 68 | else: 69 | name += f"_{worker_num + 1}" 70 | 71 | with docker_service.run( 72 | valkey_image, 73 | check=valkey_responsive, 74 | container_port=6379, 75 | name=name, 76 | transient=xdist_valkey_isolation_level == "server", 77 | ) as service: 78 | yield ValkeyService(host=service.host, port=service.port, db=db) 79 | -------------------------------------------------------------------------------- /src/pytest_databases/helpers.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import hashlib 4 | import os 5 | import platform 6 | from typing import Literal 7 | 8 | 
def simple_string_hash(string_to_hash: str) -> str:
    """Generate a short, deterministic hash of a string.

    Args:
        string_to_hash: The string to hash.

    Returns:
        The first 12 hex characters of the string's SHA-256 digest.
    """
    # digest().hex() is equivalent to hexdigest(); use the direct form.
    return hashlib.sha256(string_to_hash.encode("utf-8")).hexdigest()[:12]


def get_xdist_worker_id() -> str | None:
    """Return the raw pytest-xdist worker id (e.g. ``"gw0"``), or ``None``."""
    return os.getenv("PYTEST_XDIST_WORKER")


def get_xdist_worker_num() -> int | None:
    """Return the numeric part of the xdist worker id.

    Returns:
        The worker number, or ``None`` when not running under xdist
        (no worker id, or the controller process reporting ``"master"``).

    Raises:
        ValueError: If the worker id has an unexpected non-numeric suffix.
    """
    worker_id = get_xdist_worker_id()
    if worker_id is None or worker_id == "master":
        return None
    # removeprefix only strips a *leading* "gw"; the previous
    # str.replace("gw", "") would have removed every occurrence.
    return int(worker_id.removeprefix("gw"))


def get_xdist_worker_count() -> int:
    """Return the xdist worker count (0 when not running under xdist)."""
    return int(os.getenv("PYTEST_XDIST_WORKER_COUNT", "0"))


def get_cpu_architecture() -> Literal["x86_64", "arm", "unknown"]:
    """Detect the CPU architecture.

    Uses :func:`platform.machine` to determine whether the interpreter runs
    on an x86-64 (AMD64) or ARM (including aarch64/arm64) CPU. Works across
    the operating systems supported by Python (Windows, Linux, macOS).

    Returns:
        ``'x86_64'`` for 64-bit x86 processors, ``'arm'`` for ARM-based
        processors (both 32-bit and 64-bit), and ``'unknown'`` otherwise.

    Examples:
        >>> # On an Intel-based Mac or typical Linux/Windows PC:
        >>> # get_cpu_architecture()
        'x86_64'
        >>> # On an Apple Silicon Mac or Raspberry Pi 4:
        >>> # get_cpu_architecture()
        'arm'
    """
    machine: str = platform.machine().lower()

    if machine in {"x86_64", "amd64"}:
        return "x86_64"
    if machine.startswith(("arm", "aarch")):
        return "arm"
    # The checks above cover the vast majority of cases for these two archs;
    # platform.processor() or system-specific probes could be added if higher
    # reliability on obscure platforms is ever required.
    return "unknown"
# ------------------------------------------------------------------------------
# /src/pytest_databases/py.typed:
# https://raw.githubusercontent.com/litestar-org/pytest-databases/461f8982df55a5214068082ea72170c06513363d/src/pytest_databases/py.typed
# ------------------------------------------------------------------------------
# /src/pytest_databases/types.py:
# ------------------------------------------------------------------------------


@dataclasses.dataclass
class ServiceContainer:
    """Minimal connection info for a running service container."""

    # Hostname or IP the service is reachable at.
    host: str
    # Host-mapped TCP port.
    port: int


# "database": workers share one server but use separate logical databases;
# "server": each xdist worker gets its own server instance.
XdistIsolationLevel = Literal["database", "server"]
# ------------------------------------------------------------------------------
# /tests/__init__.py:
# https://raw.githubusercontent.com/litestar-org/pytest-databases/461f8982df55a5214068082ea72170c06513363d/tests/__init__.py
# ------------------------------------------------------------------------------
/tests/conftest.py: --------------------------------------------------------------------------------
from __future__ import annotations

import platform

import pytest

# NOTE(review): `pytestmark` declared in a conftest.py does not apply marks to
# tests in other modules (it only works inside test modules) -- confirm intended.
pytestmark = pytest.mark.anyio


pytest_plugins = [
    "pytest_databases.docker",
    "pytester",
]

# Cached host platform info used by skip conditions in the test modules.
PLATFORM_PROCESSOR = platform.processor()
PLATFORM_SYSTEM = platform.system()
-------------------------------------------------------------------------------- /tests/test_azure_blob.py: --------------------------------------------------------------------------------
import pytest

pytest_plugins = [
    "pytest_databases.docker.azure_blob",
]


# No xdist: both embedded tests share one service, so a container created in
# test_one is still visible in test_two.
def test_default_no_xdist(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    from azure.storage.blob import ContainerClient

    pytest_plugins = [
        "pytest_databases.docker.azure_blob",
    ]


    def test_one(azure_blob_container_client: ContainerClient) -> None:
        azure_blob_container_client.create_container()


    def test_two(azure_blob_container_client: ContainerClient) -> None:
        assert azure_blob_container_client.exists()
    """)
    result = pytester.runpytest()
    result.assert_outcomes(passed=2)


# "server" isolation: each of the two xdist workers gets its own service, so
# neither worker should see a pre-existing container.
def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    from azure.storage.blob import ContainerClient

    pytest_plugins = [
        "pytest_databases.docker.azure_blob",
    ]


    @pytest.fixture(scope="session")
    def azure_blob_xdist_isolation_level():
        return "server"


    def test_one(azure_blob_container_client: ContainerClient) -> None:
        assert not azure_blob_container_client.exists()
        azure_blob_container_client.create_container()


    def test_two(azure_blob_container_client: ContainerClient) -> None:
        assert not azure_blob_container_client.exists()
        azure_blob_container_client.create_container()
    """)
    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)


# Default ("database") isolation: one server, per-worker storage accounts named
# test_account_<worker_num>.
def test_xdist_isolate_database(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from azure.storage.blob import ContainerClient
    from pytest_databases.helpers import get_xdist_worker_num

    pytest_plugins = [
        "pytest_databases.docker.azure_blob",
    ]


    def test_one(azure_blob_container_client: ContainerClient, azure_blob_default_container_name: str) -> None:
        assert not azure_blob_container_client.exists()
        azure_blob_container_client.create_container()
        assert azure_blob_container_client.container_name == azure_blob_default_container_name
        assert azure_blob_container_client.account_name == f"test_account_{get_xdist_worker_num()}"


    def test_two(azure_blob_container_client: ContainerClient, azure_blob_default_container_name: str) -> None:
        assert not azure_blob_container_client.exists()
        azure_blob_container_client.create_container()
        assert azure_blob_container_client.container_name == azure_blob_default_container_name
        assert azure_blob_container_client.account_name == f"test_account_{get_xdist_worker_num()}"

    """)
    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)
-------------------------------------------------------------------------------- /tests/test_bigquery.py: --------------------------------------------------------------------------------
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import pytest


# Smoke test: a BigQuery client can connect to the emulator service fixture
# and run a trivial query.
def test_service_fixture(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from google.cloud import bigquery

    pytest_plugins = ["pytest_databases.docker.bigquery"]

    def test(bigquery_service) -> None:
        client = bigquery.Client(
            project=bigquery_service.project,
            client_options=bigquery_service.client_options,
            credentials=bigquery_service.credentials,
        )

        job = client.query(query="SELECT 1 as one")

        resp = list(job.result())
        assert resp[0].one == 1
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)


def test_client_fixture(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from google.cloud import bigquery

    pytest_plugins = ["pytest_databases.docker.bigquery"]

    def test(bigquery_client) -> None:
        assert isinstance(bigquery_client, bigquery.Client)
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)


# Both workers create the same table name; passing proves each worker has an
# isolated dataset.
def test_xdist(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from google.cloud import bigquery

    pytest_plugins = ["pytest_databases.docker.bigquery"]

    def test_one(bigquery_client, bigquery_service) -> None:
        bigquery_client.query(f"CREATE TABLE `{bigquery_service.dataset}.test` AS select 1 as the_value")

    def test_two(bigquery_client, bigquery_service) -> None:
        bigquery_client.query(f"CREATE TABLE `{bigquery_service.dataset}.test` AS select 1 as the_value")
    """)

    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)
-------------------------------------------------------------------------------- /tests/test_cockroachdb.py: --------------------------------------------------------------------------------
# NOTE(review): the commented-out block below is the pre-pytester version of
# these tests, apparently kept for reference -- consider deleting it.
# from __future__ import annotations
#
# from typing import TYPE_CHECKING
#
# import psycopg
#
# if TYPE_CHECKING:
#     from pytest_databases.docker.cockroachdb import CockroachDBService
#
# pytest_plugins = [
#     "pytest_databases.docker.cockroachdb",
# ]
#
#
# def test_cockroachdb_default_config(cockroachdb_driver_opts: dict[str, str]) -> None:
#     assert cockroachdb_driver_opts == {"sslmode": "disable"}
#
#
# def test_cockroachdb_service(
#     cockroachdb_service: CockroachDBService,
# ) -> None:
#     opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items())
#     with psycopg.connect(
#         f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}"
#     ) as conn:
#         conn.execute("CREATE TABLE if not exists simple_table as SELECT 1 as the_value")
#         result = conn.execute("select * from simple_table").fetchone()
#         assert result is not None and result[0] == 1
#
#
# def test_cockroachdb_services_after_start(
#     cockroachdb_startup_connection: psycopg.Connection,
# ) -> None:
#     cockroachdb_startup_connection.execute("CREATE TABLE if not exists simple_table as SELECT 1 as the_value")
#     result = cockroachdb_startup_connection.execute("select * from simple_table").fetchone()
#     assert result is not None and result[0] == 1


from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import pytest


# NOTE(review): the embedded tests below import _make_connection_string but
# never call it -- presumably a leftover; confirm and drop if unused.
def test_service_fixture(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    import psycopg
    from pytest_databases.docker.postgres import _make_connection_string  # noqa: PLC2701

    pytest_plugins = ["pytest_databases.docker.cockroachdb"]

    def test(cockroachdb_service) -> None:
        opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items())
        with psycopg.connect(
            f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}"
        ) as conn:
            db_open = conn.execute("SELECT 1").fetchone()
            assert db_open is not None and db_open[0] == 1
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)


def test_startup_connection_fixture(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    import psycopg
    from pytest_databases.docker.postgres import _make_connection_string  # noqa: PLC2701


    pytest_plugins = ["pytest_databases.docker.cockroachdb"]

    def test(cockroachdb_connection) -> None:
        cockroachdb_connection.execute("CREATE TABLE if not exists simple_table as SELECT 1")
        result = cockroachdb_connection.execute("select * from simple_table").fetchone()
        assert result is not None and result[0] == 1
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)


# Both workers create the same table; passing proves per-worker databases.
def test_xdist_isolate_database(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    import psycopg
    from pytest_databases.docker.postgres import _make_connection_string

    pytest_plugins = ["pytest_databases.docker.cockroachdb"]

    def test_one(cockroachdb_service) -> None:
        opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items())
        with psycopg.connect(
            f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}"
        ) as conn:
            conn.execute("CREATE TABLE foo AS SELECT 1")

    def test_two(cockroachdb_service) -> None:
        opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items())
        with psycopg.connect(
            f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}"
        ) as conn:
            conn.execute("CREATE TABLE foo AS SELECT 1")
    """)

    result = pytester.runpytest_subprocess("-n", "2")
    result.assert_outcomes(passed=2)


# Both workers create the same database; passing proves per-worker servers.
def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    import psycopg
    from pytest_databases.docker.postgres import _make_connection_string

    pytest_plugins = ["pytest_databases.docker.cockroachdb"]

    @pytest.fixture(scope="session")
    def xdist_cockroachdb_isolation_level():
        return "server"

    def test_one(cockroachdb_service) -> None:
        opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items())
        with psycopg.connect(
            f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}"
        ) as conn:
            conn.execute("CREATE DATABASE foo")

    def test_two(cockroachdb_service) -> None:
        opts = "&".join(f"{k}={v}" for k, v in cockroachdb_service.driver_opts.items())
        with psycopg.connect(
            f"postgresql://root@{cockroachdb_service.host}:{cockroachdb_service.port}/{cockroachdb_service.database}?{opts}"
        ) as conn:
            conn.execute("CREATE DATABASE foo")
    """)

    result = pytester.runpytest_subprocess("-n", "2")
    result.assert_outcomes(passed=2)
-------------------------------------------------------------------------------- /tests/test_elasticsearch.py: --------------------------------------------------------------------------------
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import pytest


def test_elasticsearch_7(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from elasticsearch7 import Elasticsearch

    pytest_plugins = ["pytest_databases.docker.elastic_search"]

    def test(elasticsearch_7_service) -> None:
        with Elasticsearch(
            hosts=[
                {
                    "host": elasticsearch_7_service.host,
                    "port": elasticsearch_7_service.port,
                    "scheme": elasticsearch_7_service.scheme,
                }
            ],
            verify_certs=False,
            http_auth=(elasticsearch_7_service.user, elasticsearch_7_service.password),
        ) as client:
            info = client.info()

        assert info["version"]["number"] == "7.17.19"
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)


# NOTE(review): this embedded test talks to the v8 service through the
# elasticsearch7 client (with basic_auth) -- presumably intentional to avoid a
# second client dependency; confirm.
def test_elasticsearch_8(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from elasticsearch7 import Elasticsearch

    pytest_plugins = ["pytest_databases.docker.elastic_search"]

    def test(elasticsearch_8_service) -> None:
        with Elasticsearch(
            hosts=[
                {
                    "host": elasticsearch_8_service.host,
                    "port": elasticsearch_8_service.port,
                    "scheme": elasticsearch_8_service.scheme,
                }
            ],
            verify_certs=False,
            basic_auth=(elasticsearch_8_service.user, elasticsearch_8_service.password),
        ) as client:
            info = client.info()

        assert info["version"]["number"] == "8.13.0"
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)
-------------------------------------------------------------------------------- /tests/test_mariadb.py: --------------------------------------------------------------------------------
from __future__ import annotations

import pytest


@pytest.mark.parametrize(
    "service_fixture",
    [
        "mariadb_service",
        "mariadb_113_service",
    ],
)
def test_service_fixture(pytester: pytest.Pytester, service_fixture: str) -> None:
    pytester.makepyfile(f"""
    import mariadb

    pytest_plugins = ["pytest_databases.docker.mariadb"]

    def test({service_fixture}):
        with mariadb.connect(
            host={service_fixture}.host,
            port={service_fixture}.port,
            user={service_fixture}.user,
            database={service_fixture}.db,
            password={service_fixture}.password,
        ) as conn, conn.cursor() as cursor:
            cursor.execute("select 1 as is_available")
            resp = cursor.fetchone()
            assert resp is not None and resp[0] == 1
    """)

    result = pytester.runpytest("-vv")
    result.assert_outcomes(passed=1)


@pytest.mark.parametrize(
    "connection_fixture",
    [
        "mariadb_connection",
        "mariadb_113_connection",
    ],
)
def test_connection_fixture(pytester: pytest.Pytester, connection_fixture: str) -> None:
    pytester.makepyfile(f"""
    pytest_plugins = ["pytest_databases.docker.mariadb"]

    def test({connection_fixture}):
        with {connection_fixture}.cursor() as cursor:
            cursor.execute("CREATE TABLE if not exists simple_table as SELECT 1 as the_value")
            cursor.execute("select * from simple_table")
            result = cursor.fetchall()
            assert result is not None and result[0][0] == 1
    """)

    result = pytester.runpytest("-vv")
    result.assert_outcomes(passed=1)


# Same CREATE TABLE on both workers; passing proves per-worker databases.
def test_xdist_isolate_database(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    pytest_plugins = ["pytest_databases.docker.mariadb"]

    def test_1(mariadb_113_connection):
        with mariadb_113_connection.cursor() as cursor:
            cursor.execute("CREATE TABLE simple_table as SELECT 1 as the_value;")

    def test_2(mariadb_113_connection):
        with mariadb_113_connection.cursor() as cursor:
            cursor.execute("CREATE TABLE simple_table as SELECT 1 as the_value;")
    """)

    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)


# Same CREATE DATABASE on both workers; passing proves per-worker servers.
def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    pytest_plugins = ["pytest_databases.docker.mariadb"]

    @pytest.fixture(scope="session")
    def xdist_mariadb_isolation_level():
        return "server"

    def test_1(mariadb_113_connection):
        with mariadb_113_connection.cursor() as cursor:
            cursor.execute("CREATE DATABASE db_test")

    def test_2(mariadb_113_connection):
        with mariadb_113_connection.cursor() as cursor:
            cursor.execute("CREATE DATABASE db_test")
    """)

    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)
-------------------------------------------------------------------------------- /tests/test_minio.py: --------------------------------------------------------------------------------
import pytest

pytest_plugins = [
    "pytest_databases.docker.minio",
]


def test_default_no_xdist(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    from minio import Minio

    pytest_plugins = [
        "pytest_databases.docker.minio",
    ]


    def test_one(minio_client: Minio) -> None:
        minio_client.make_bucket("pytest-databases-test-no-xdist")
        assert minio_client.bucket_exists("pytest-databases-test-no-xdist")


    def test_two(minio_client: Minio) -> None:
        assert minio_client.bucket_exists("pytest-databases-test-no-xdist")
    """)
    result = pytester.runpytest()
    result.assert_outcomes(passed=2)


# Each worker gets its own server; a worker-suffixed bucket must not pre-exist.
def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pytest
    from minio import Minio
    from pytest_databases.helpers import get_xdist_worker_num
    pytest_plugins = [
        "pytest_databases.docker.minio",
    ]


    @pytest.fixture(scope="session")
    def xdist_minio_isolation_level():
        return "server"


    def test_one(minio_client: Minio, minio_default_bucket_name: str) -> None:
        assert minio_client.bucket_exists(minio_default_bucket_name)
        isolated_bucket_name = f"{minio_default_bucket_name}-isolated-{get_xdist_worker_num()}"
        assert not minio_client.bucket_exists(isolated_bucket_name)
        minio_client.make_bucket(isolated_bucket_name)
        assert minio_client.bucket_exists(isolated_bucket_name)


    def test_two(minio_client: Minio, minio_default_bucket_name: str) -> None:
        assert minio_client.bucket_exists(minio_default_bucket_name)
        isolated_bucket_name = f"{minio_default_bucket_name}-isolated-{get_xdist_worker_num()}"
        assert not minio_client.bucket_exists(isolated_bucket_name)
        minio_client.make_bucket(isolated_bucket_name)
        assert minio_client.bucket_exists(isolated_bucket_name)
    """)
    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)


def test_xdist_isolate_database(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    from minio import Minio
    from pytest_databases.helpers import get_xdist_worker_num
    pytest_plugins = [
        "pytest_databases.docker.minio",
    ]


    def test_one(minio_client: Minio, minio_default_bucket_name: str) -> None:
        assert minio_client.bucket_exists(minio_default_bucket_name)
        isolated_bucket_name = f"{minio_default_bucket_name}-isolated-{get_xdist_worker_num()}"
        assert not minio_client.bucket_exists(isolated_bucket_name)
        minio_client.make_bucket(isolated_bucket_name)
        assert minio_client.bucket_exists(isolated_bucket_name)

    def test_two(minio_client: Minio, minio_default_bucket_name: str) -> None:
        assert minio_client.bucket_exists(minio_default_bucket_name)
        isolated_bucket_name = f"{minio_default_bucket_name}-isolated-{get_xdist_worker_num()}"
        assert not minio_client.bucket_exists(isolated_bucket_name)
        minio_client.make_bucket(isolated_bucket_name)
        assert minio_client.bucket_exists(isolated_bucket_name)

    """)
    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)
-------------------------------------------------------------------------------- /tests/test_mssql.py: --------------------------------------------------------------------------------
from __future__ import annotations

import pytest

from tests.conftest import PLATFORM_PROCESSOR


# NOTE(review): platform.processor() == "arm" matches Apple Silicon macOS, but
# on Linux aarch64 it is typically "" or "aarch64" -- this guard may not skip
# on Linux ARM; confirm that is acceptable.
# NOTE(review): the embedded test below *returns* a boolean instead of
# asserting; a falsy result would not fail the test. Confirm intent.
@pytest.mark.parametrize(
    "service_fixture",
    [
        "mssql_service",
    ],
)
@pytest.mark.skipif(PLATFORM_PROCESSOR == "arm", reason="ARM bug. https://github.com/pymssql/pymssql/issues/822")
def test_service_fixture(pytester: pytest.Pytester, service_fixture: str) -> None:
    pytester.makepyfile(f"""
    import pymssql
    pytest_plugins = ["pytest_databases.docker.mssql"]

    def test({service_fixture}):
        conn = pymssql.connect(
            host={service_fixture}.host,
            port=str({service_fixture}.port),
            database={service_fixture}.database,
            user={service_fixture}.user,
            password={service_fixture}.password,
            timeout=2,
        )
        with conn.cursor() as cursor:
            cursor.execute("select 1 as is_available")
            resp = cursor.fetchone()
            return resp[0] == 1 if resp is not None else False
    """)

    result = pytester.runpytest("-vv")
    result.assert_outcomes(passed=1)


@pytest.mark.parametrize(
    "connection_fixture",
    [
        "mssql_connection",
    ],
)
@pytest.mark.skipif(PLATFORM_PROCESSOR == "arm", reason="ARM bug. https://github.com/pymssql/pymssql/issues/822")
def test_connection_fixture(pytester: pytest.Pytester, connection_fixture: str) -> None:
    pytester.makepyfile(f"""
    import pymssql
    pytest_plugins = ["pytest_databases.docker.mssql"]

    def test({connection_fixture}):
        with {connection_fixture}.cursor() as cursor:
            cursor.execute("CREATE view simple_table as SELECT 1 as the_value")
            cursor.execute("select * from simple_table")
            result = cursor.fetchall()
            assert bool(result is not None and result[0][0] == 1)
            cursor.execute("drop view simple_table")

    """)

    result = pytester.runpytest("-vv")
    result.assert_outcomes(passed=1)


# Same CREATE VIEW on both workers; passing proves per-worker databases.
@pytest.mark.skipif(PLATFORM_PROCESSOR == "arm", reason="ARM bug. https://github.com/pymssql/pymssql/issues/822")
def test_xdist_isolate_database(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pymssql
    pytest_plugins = ["pytest_databases.docker.mssql"]

    def test_1(mssql_connection):
        with mssql_connection.cursor() as cursor:
            cursor.execute("CREATE view simple_table as SELECT 1 as the_value;")

    def test_2(mssql_connection):
        with mssql_connection.cursor() as cursor:
            cursor.execute("CREATE view simple_table as SELECT 1 as the_value;")
    """)

    result = pytester.runpytest("-n", "2", "-vv")
    result.assert_outcomes(passed=2)


# Same CREATE DATABASE on both workers; passing proves per-worker servers.
@pytest.mark.skipif(PLATFORM_PROCESSOR == "arm", reason="ARM bug. https://github.com/pymssql/pymssql/issues/822")
def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    pytester.makepyfile("""
    import pymssql
    import pytest
    pytest_plugins = ["pytest_databases.docker.mssql"]

    @pytest.fixture(scope="session")
    def xdist_mssql_isolation_level():
        return "server"

    def test_1(mssql_service):
        with pymssql.connect(
            host=mssql_service.host,
            port=str(mssql_service.port),
            database=mssql_service.database,
            user=mssql_service.user,
            password=mssql_service.password,
            timeout=2,
            autocommit=True,
        ) as conn, conn.cursor() as cursor:
            cursor.execute("CREATE DATABASE db_test")

    def test_2(mssql_service):
        with pymssql.connect(
            host=mssql_service.host,
            port=str(mssql_service.port),
            database=mssql_service.database,
            user=mssql_service.user,
            password=mssql_service.password,
            timeout=2,
            autocommit=True,
        ) as conn, conn.cursor() as cursor:
            cursor.execute("CREATE DATABASE db_test")
    """)

    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)
@pytest.mark.parametrize(
    "service_fixture",
    ["mysql_8_service", "mysql_56_service", "mysql_57_service"],
)
def test_service_fixture(pytester: pytest.Pytester, service_fixture: str) -> None:
    """Each MySQL service fixture must expose connection details that permit a
    round-trip ``SELECT 1`` against the running container."""
    pytester.makepyfile(f"""
        import mysql.connector
        pytest_plugins = ["pytest_databases.docker.mysql"]

        def test({service_fixture}):
            with mysql.connector.connect(
                host={service_fixture}.host,
                port={service_fixture}.port,
                user={service_fixture}.user,
                database={service_fixture}.db,
                password={service_fixture}.password,
            ) as conn, conn.cursor() as cursor:
                cursor.execute("select 1 as is_available")
                resp = cursor.fetchone()
                assert resp is not None and resp[0] == 1
    """)

    pytester.runpytest("-vv").assert_outcomes(passed=1)


@pytest.mark.parametrize(
    "connection_fixture",
    ["mysql_56_connection", "mysql_57_connection"],
)
def test_connection_fixture(pytester: pytest.Pytester, connection_fixture: str) -> None:
    """The MySQL connection fixtures must yield connections that can create
    and query a table."""
    pytester.makepyfile(f"""
        pytest_plugins = ["pytest_databases.docker.mysql"]

        def test({connection_fixture}):
            with {connection_fixture}.cursor() as cursor:
                cursor.execute("CREATE TABLE if not exists simple_table as SELECT 1 as the_value")
                cursor.execute("select * from simple_table")
                result = cursor.fetchall()
                assert result is not None and result[0][0] == 1
    """)

    pytester.runpytest("-vv").assert_outcomes(passed=1)


def test_xdist_isolate_database(pytester: pytest.Pytester) -> None:
    """Default (database-level) xdist isolation: two workers create the same
    table name without conflict, so both generated tests pass."""
    pytester.makepyfile("""
        pytest_plugins = ["pytest_databases.docker.mysql"]

        def test_1(mysql_56_connection):
            with mysql_56_connection.cursor() as cursor:
                cursor.execute("CREATE TABLE simple_table as SELECT 1 as the_value;")

        def test_2(mysql_56_connection):
            with mysql_56_connection.cursor() as cursor:
                cursor.execute("CREATE TABLE simple_table as SELECT 1 as the_value;")
    """)

    pytester.runpytest("-n", "2").assert_outcomes(passed=2)


def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    """Server-level xdist isolation: with one server per worker, both
    generated tests can run ``CREATE DATABASE db_test`` successfully."""
    pytester.makepyfile("""
        import pytest
        pytest_plugins = ["pytest_databases.docker.mysql"]

        @pytest.fixture(scope="session")
        def xdist_mysql_isolation_level():
            return "server"

        def test_1(mysql_56_connection):
            with mysql_56_connection.cursor() as cursor:
                cursor.execute("CREATE DATABASE db_test")

        def test_2(mysql_56_connection):
            with mysql_56_connection.cursor() as cursor:
                cursor.execute("CREATE DATABASE db_test")
    """)

    pytester.runpytest("-n", "2").assert_outcomes(passed=2)
@pytest.mark.parametrize("connection_fixture", ["oracle_18c_connection", "oracle_23ai_connection"])
def test_connection_fixture(pytester: pytest.Pytester, connection_fixture: str) -> None:
    """Verify the Oracle connection fixtures yield usable connections.

    Writes a throwaway test module that creates a view through the
    fixture-provided connection and reads it back.

    Fix: the generated module was written but never executed — the function
    previously ended right after ``makepyfile`` and therefore passed without
    checking anything. Run it and assert the outcome, matching every other
    fixture test in this suite.
    """
    pytester.makepyfile(f"""
        import oracledb
        pytest_plugins = ["pytest_databases.docker.oracle"]

        def test({connection_fixture}):
            with {connection_fixture}.cursor() as cursor:
                cursor.execute("CREATE or replace view simple_table as SELECT 1 as the_value from dual")
                cursor.execute("select * from simple_table")
                result = cursor.fetchall()
                assert bool(result is not None and result[0][0] == 1)
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)
@pytest.mark.parametrize(
    "connection_fixture",
    [
        "postgres_connection",
        "postgres_11_connection",
        "postgres_12_connection",
        "postgres_13_connection",
        "postgres_14_connection",
        "postgres_15_connection",
        "postgres_16_connection",
        "postgres_17_connection",
        "alloydb_omni_connection",
        "pgvector_connection",
    ],
)
def test_startup_connection_fixture(pytester: pytest.Pytester, connection_fixture: str) -> None:
    """Each Postgres-family connection fixture must yield a psycopg connection
    that can create and query a table."""
    pytester.makepyfile(f"""
        import pytest
        import psycopg
        from pytest_databases.docker.postgres import _make_connection_string  # noqa: PLC2701


        pytest_plugins = [
            "pytest_databases.docker.postgres",
        ]

        def test({connection_fixture}) -> None:
            {connection_fixture}.execute("CREATE TABLE if not exists simple_table as SELECT 1")
            result = {connection_fixture}.execute("select * from simple_table").fetchone()
            assert result is not None and result[0] == 1
    """)

    result = pytester.runpytest()
    result.assert_outcomes(passed=1)


def test_xdist_isolate_db(pytester: pytest.Pytester) -> None:
    """Default (database-level) xdist isolation: two workers create the same
    table name without conflict.

    Fix: both generated functions were named ``test_two``, so the first was
    shadowed, only one test was collected, and the ``passed=1`` expectation
    meant isolation was never actually exercised. Rename them and expect two
    passes, matching the mysql/mssql analogues.
    """
    pytester.makepyfile("""
        import pytest
        import psycopg
        from pytest_databases.docker.postgres import _make_connection_string  # noqa: PLC2701


        pytest_plugins = ["pytest_databases.docker.postgres"]

        def test_one(postgres_connection) -> None:
            postgres_connection.execute("CREATE TABLE foo AS SELECT 1")

        def test_two(postgres_connection) -> None:
            postgres_connection.execute("CREATE TABLE foo AS SELECT 1")
    """)

    result = pytester.runpytest("-n", "2")
    result.assert_outcomes(passed=2)


def test_xdist_isolate_server(pytester: pytest.Pytester) -> None:
    """Server-level xdist isolation: with one server per worker, both
    generated tests can run ``CREATE DATABASE foo`` without colliding."""
    pytester.makepyfile("""
        import pytest
        import psycopg
        from pytest_databases.docker.postgres import _make_connection_string

        pytest_plugins = [
            "pytest_databases.docker.postgres",
        ]

        @pytest.fixture(scope="session")
        def xdist_postgres_isolation_level():
            return "server"

        def test_one(postgres_service) -> None:
            with psycopg.connect(
                _make_connection_string(
                    host=postgres_service.host,
                    port=postgres_service.port,
                    user=postgres_service.user,
                    password=postgres_service.password,
                    database=postgres_service.database,
                ),
                autocommit=True,
            ) as conn:
                conn.execute("CREATE DATABASE foo")

        def test_two(postgres_service) -> None:
            with psycopg.connect(
                _make_connection_string(
                    host=postgres_service.host,
                    port=postgres_service.port,
                    user=postgres_service.user,
                    password=postgres_service.password,
                    database=postgres_service.database,
                ),
                autocommit=True,
            ) as conn:
                conn.execute("CREATE DATABASE foo")
    """)

    # subprocess run: server-level isolation spawns per-worker containers
    result = pytester.runpytest_subprocess("-n", "2")
    result.assert_outcomes(passed=2)
@pytest.fixture(
    params=[
        pytest.param("redis_service", id="redis"),
        pytest.param("keydb_service", id="keydb"),
        pytest.param("dragonfly_service", id="dragonflydb"),
    ]
)
def redis_compatible_service(request: pytest.FixtureRequest) -> str:
    """Name of a redis-compatible service fixture to exercise."""
    return request.param


def test_redis_image(pytester: pytest.Pytester, redis_image_name: str) -> None:
    """Overriding the ``redis_image`` fixture must let the service run any
    redis-compatible image and still answer PING."""
    pytester.makepyfile(f"""
        import pytest
        import redis
        from pytest_databases.docker.redis import RedisService
        from pytest_databases.helpers import get_xdist_worker_num

        pytest_plugins = [
            "pytest_databases.docker.redis",
        ]

        @pytest.fixture(scope="session")
        def redis_image():
            return "{redis_image_name}"

        def test_redis_service(redis_service: RedisService) -> None:
            assert redis.Redis.from_url("redis://", host=redis_service.host, port=redis_service.port).ping()
    """)
    pytester.runpytest().assert_outcomes(passed=1)


def test_default_no_xdist(pytester: pytest.Pytester, redis_compatible_service: str) -> None:
    """Without xdist, the service fixture must expose host/port for a
    successful PING."""
    pytester.makepyfile(f"""
        import pytest
        import redis
        from pytest_databases.docker.redis import RedisService
        from pytest_databases.helpers import get_xdist_worker_num

        pytest_plugins = [
            "pytest_databases.docker.redis",
        ]

        def test_redis_service({redis_compatible_service}: RedisService) -> None:
            assert redis.Redis.from_url("redis://", host={redis_compatible_service}.host, port={redis_compatible_service}.port).ping()
    """)
    pytester.runpytest().assert_outcomes(passed=1)


def test_xdist_isolate_database(pytester: pytest.Pytester, redis_compatible_service: str) -> None:
    """Default xdist isolation maps each worker to its own redis logical db:
    both generated tests write the same key without seeing each other, and the
    db index equals the worker number."""
    pytester.makepyfile(f"""
        import pytest
        import redis
        from pytest_databases.docker.redis import RedisService
        from pytest_databases.helpers import get_xdist_worker_num

        pytest_plugins = [
            "pytest_databases.docker.redis",
        ]

        def test_one({redis_compatible_service}: RedisService) -> None:
            client = redis.Redis.from_url("redis://", host={redis_compatible_service}.host, port={redis_compatible_service}.port, db={redis_compatible_service}.db)
            assert not client.get("one")
            client.set("one", "1")
            assert {redis_compatible_service}.db == get_xdist_worker_num()


        def test_two({redis_compatible_service}: RedisService) -> None:
            client = redis.Redis.from_url("redis://", host={redis_compatible_service}.host, port={redis_compatible_service}.port, db={redis_compatible_service}.db)
            assert not client.get("one")
            client.set("one", "1")
            assert {redis_compatible_service}.db == get_xdist_worker_num()
    """)
    pytester.runpytest("-n", "2").assert_outcomes(passed=2)
def test_service_fixture(pytester: pytest.Pytester) -> None:
    """The Spanner service fixture must allow creating an instance and a
    database against the emulator and executing a trivial query."""
    pytester.makepyfile("""
        from google.cloud import spanner
        import contextlib

        pytest_plugins = ["pytest_databases.docker.spanner"]

        def test_spanner_service(spanner_service) -> None:
            spanner_client = spanner.Client(
                project=spanner_service.project,
                credentials=spanner_service.credentials,
                client_options=spanner_service.client_options,
            )
            instance = spanner_client.instance(spanner_service.instance_name)
            with contextlib.suppress(Exception):
                instance.create()

            database = instance.database(spanner_service.database_name)
            with contextlib.suppress(Exception):
                database.create()

            with database.snapshot() as snapshot:
                resp = next(iter(snapshot.execute_sql("SELECT 1")))
                assert resp[0] == 1
    """)

    pytester.runpytest("-vv").assert_outcomes(passed=1)


def test_spanner_connection(pytester: pytest.Pytester) -> None:
    """The ``spanner_connection`` fixture must yield a ``spanner.Client``."""
    pytester.makepyfile("""
        from google.cloud import spanner
        pytest_plugins = ["pytest_databases.docker.spanner"]

        def test(spanner_connection) -> None:
            assert isinstance(spanner_connection, spanner.Client)
    """)

    pytester.runpytest().assert_outcomes(passed=1)
@pytest.fixture(params=[pytest.param("valkey_service", id="valkey")])
def valkey_compatible_service(request: pytest.FixtureRequest) -> str:
    """Name of a valkey-compatible service fixture to exercise."""
    return request.param


def test_default_no_xdist(pytester: pytest.Pytester, valkey_compatible_service: str) -> None:
    """Without xdist, the valkey service fixture must expose host/port for a
    successful PING."""
    pytester.makepyfile(f"""
        import pytest
        import valkey
        from pytest_databases.docker.valkey import ValkeyService
        from pytest_databases.helpers import get_xdist_worker_num

        pytest_plugins = [
            "pytest_databases.docker.valkey",
        ]

        def test_valkey_service({valkey_compatible_service}: ValkeyService) -> None:
            assert valkey.Valkey.from_url("valkey://", host={valkey_compatible_service}.host, port={valkey_compatible_service}.port).ping()
    """)
    pytester.runpytest().assert_outcomes(passed=1)
def test_xdist_isolate_server(pytester: pytest.Pytester, valkey_compatible_service: str) -> None:
    """Server-level xdist isolation for valkey.

    The session fixture ``xdist_valkey_isolation_level`` requests one server
    per worker; both generated tests then use db 0 of their own server, so
    writing the same key never collides.
    """
    pytester.makepyfile(f"""
        import pytest
        import valkey
        from pytest_databases.docker.valkey import ValkeyService
        from pytest_databases.helpers import get_xdist_worker_num

        pytest_plugins = [
            "pytest_databases.docker.valkey",
        ]

        @pytest.fixture(scope="session")
        def xdist_valkey_isolation_level():
            return "server"


        def test_one({valkey_compatible_service}: ValkeyService) -> None:
            client = valkey.Valkey.from_url("valkey://", host={valkey_compatible_service}.host, port={valkey_compatible_service}.port)
            assert client.ping()
            assert {valkey_compatible_service}.db == 0


        def test_two({valkey_compatible_service}: ValkeyService) -> None:
            client = valkey.Valkey.from_url("valkey://", host={valkey_compatible_service}.host, port={valkey_compatible_service}.port)
            assert not client.get("one")
            client.set("one", "1")
            assert {valkey_compatible_service}.db == 0
    """)
    pytester.runpytest("-n", "2").assert_outcomes(passed=2)