├── tests ├── fixture │ ├── moto │ │ ├── __init__.py │ │ ├── test_create_bucket.py │ │ ├── test_basic.py │ │ └── test_create_s3_files.py │ ├── postgres │ │ ├── __init__.py │ │ └── test_sqlalchemy2_base_class.py │ ├── redshift │ │ ├── __init__.py │ │ └── test_patch.py │ ├── non_session_container │ │ ├── __init__.py │ │ ├── test_static_port.py │ │ └── test_dynamic_port.py │ ├── __init__.py │ ├── test_statements.py │ ├── test_rows.py │ ├── test_postgres.py │ ├── test_pmr_credentials.py │ ├── test_mongo.py │ ├── test_database.py │ └── test_engine_manager.py ├── test_non_session_container.py ├── conftest.py ├── examples │ ├── test_multiprocess_redis_database │ │ ├── conftest.py │ │ └── test_split.py │ └── test_multiprocess_container_cleanup_race_condition │ │ ├── conftest.py │ │ └── test_split.py ├── __init__.py ├── test_examples.py └── test_config.py ├── src └── pytest_mock_resources │ ├── py.typed │ ├── patch │ ├── __init__.py │ └── redshift │ │ ├── __init__.py │ │ ├── psycopg2.py │ │ ├── sqlalchemy.py │ │ ├── mock_s3_copy.py │ │ └── mock_s3_unload.py │ ├── fixture │ ├── moto │ │ ├── __init__.py │ │ ├── action.py │ │ └── base.py │ ├── base.py │ ├── __init__.py │ ├── mongo.py │ ├── redis.py │ ├── mysql.py │ └── redshift │ │ ├── __init__.py │ │ └── udf.py │ ├── plugin.py │ ├── container │ ├── __init__.py │ ├── moto.py │ ├── redshift.py │ ├── mongo.py │ ├── redis.py │ ├── mysql.py │ ├── postgres.py │ └── base.py │ ├── compat │ ├── import_.py │ ├── sqlalchemy.py │ └── __init__.py │ ├── action.py │ ├── __init__.py │ ├── cli.py │ ├── config.py │ ├── credentials.py │ └── hooks.py ├── .coveragerc ├── codecov.yml ├── docs ├── requirements.txt ├── source │ ├── index.rst │ ├── api.rst │ ├── relational │ │ ├── index.rst │ │ ├── basics.rst │ │ ├── manual-engines.rst │ │ └── template-database.rst │ ├── quickstart.rst │ ├── postgres.rst │ ├── docker_client.rst │ ├── conf.py │ ├── redshift.rst │ ├── fixtures.rst │ ├── contributing.rst │ ├── async.rst │ ├── redis.rst │ ├── mongo.rst │ 
├── cli.rst │ ├── config.rst │ ├── ci.rst │ ├── sqlite.rst │ └── moto.rst └── Makefile ├── readthedocs.yml ├── .bumpversion.cfg ├── setup.cfg ├── .github ├── ISSUE_TEMPLATE │ ├── feature_request.md │ └── issue.md └── workflows │ ├── release.yml │ └── test.yml ├── .gitignore ├── LICENSE ├── Makefile ├── pyproject.toml └── README.md /tests/fixture/moto/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/fixture/postgres/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/fixture/redshift/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/patch/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/fixture/non_session_container/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/patch/redshift/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [coverage:run] 2 | branch = True 3 | omit = tests/* 4 | 
-------------------------------------------------------------------------------- /tests/fixture/__init__.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytest.register_assert_rewrite("tests.fixture.redshift.utils") 4 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | target: auto 6 | threshold: 5% 7 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | m2r2==0.2.7 2 | sphinx==3.3.0 3 | sphinx_rtd_theme 4 | sphinx_autodoc_typehints 5 | sphinx-autobuild 6 | docutils<0.18 7 | mistune==0.8.4 8 | . 9 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | build: 2 | image: latest 3 | 4 | python: 5 | version: 3.7 6 | pip_install: true 7 | 8 | requirements: docs/requirements.txt 9 | sphinx: 10 | configuration: docs/conf.py 11 | -------------------------------------------------------------------------------- /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 1.2.0 3 | commit = False 4 | tag = False 5 | allow_dirty = True 6 | 7 | [bumpversion:file:pyproject.toml] 8 | 9 | [bumpversion:file:./docs/source/conf.py] 10 | -------------------------------------------------------------------------------- /tests/test_non_session_container.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pytest_mock_resources.container.postgres import PostgresConfig 4 | 5 | 6 | @pytest.fixture(scope="session") 7 | def pmr_postgres_config(): 8 
| return PostgresConfig(port=None) 9 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytest_plugins = "pytester" 4 | 5 | 6 | # See https://github.com/spulec/moto/issues/3292#issuecomment-770682026 7 | @pytest.fixture(autouse=True) 8 | def set_aws_region(monkeypatch): 9 | monkeypatch.setenv("AWS_DEFAULT_REGION", "us-east-1") 10 | -------------------------------------------------------------------------------- /tests/examples/test_multiprocess_redis_database/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pytest_mock_resources import create_redis_fixture, RedisConfig 4 | 5 | 6 | @pytest.fixture(scope="session") 7 | def pmr_redis_config(): 8 | return RedisConfig(port=6380) 9 | 10 | 11 | redis = create_redis_fixture() 12 | -------------------------------------------------------------------------------- /tests/examples/test_multiprocess_container_cleanup_race_condition/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pytest_mock_resources import create_postgres_fixture, PostgresConfig 4 | 5 | 6 | @pytest.fixture(scope="session") 7 | def pmr_postgres_config(): 8 | return PostgresConfig(port=5433) 9 | 10 | 11 | pg = create_postgres_fixture() 12 | -------------------------------------------------------------------------------- /tests/fixture/moto/test_create_bucket.py: -------------------------------------------------------------------------------- 1 | from pytest_mock_resources import create_moto_fixture, S3Bucket 2 | 3 | moto = create_moto_fixture(S3Bucket("foo")) 4 | 5 | 6 | def test_create_bucket(moto): 7 | s3 = moto.client("s3") 8 | buckets = s3.list_buckets()["Buckets"] 9 | assert len(buckets) == 1 10 | assert buckets[0]["Name"] == "foo" 11 | 
-------------------------------------------------------------------------------- /tests/fixture/moto/test_basic.py: -------------------------------------------------------------------------------- 1 | from pytest_mock_resources import create_moto_fixture 2 | 3 | moto = create_moto_fixture() 4 | 5 | 6 | def test_create_bucket(moto): 7 | s3 = moto.client("s3") 8 | s3.create_bucket(Bucket="foo") 9 | buckets = s3.list_buckets()["Buckets"] 10 | assert len(buckets) == 1 11 | assert buckets[0]["Name"] == "foo" 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max_line_length = 200 3 | ignore = W503 4 | 5 | [pydocstyle] 6 | ignore = D1,D200,D202,D203,D204,D213,D406,D407,D413 7 | match_dir = ^[^\.{]((?!igrations).)* 8 | 9 | [coverage:run] 10 | source = src 11 | branch = True 12 | parallel = True 13 | 14 | [coverage:report] 15 | show_missing = True 16 | skip_covered = True 17 | exclude_lines = pragma: no cover 18 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/moto/__init__.py: -------------------------------------------------------------------------------- 1 | from pytest_mock_resources.fixture.moto.action import MotoAction, S3Bucket, S3Object 2 | from pytest_mock_resources.fixture.moto.base import ( 3 | create_moto_fixture, 4 | Credentials, 5 | pmr_moto_config, 6 | pmr_moto_container, 7 | Session, 8 | ) 9 | 10 | __all__ = [ 11 | "Credentials", 12 | "MotoAction", 13 | "S3Bucket", 14 | "S3Object", 15 | "Session", 16 | "create_moto_fixture", 17 | "pmr_moto_config", 18 | "pmr_moto_container", 19 | ] 20 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to Pytest Mock Resource's documentation! 
2 | ================================================ 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | Quickstart 9 | Fixtures 10 | Asyncio 11 | Fixture Configuration 12 | CLI (Startup Lag) 13 | CI Support 14 | Docker/Podman/Nerdctl 15 | API 16 | Contributing 17 | 18 | 19 | Indices and tables 20 | ================== 21 | 22 | * :ref:`genindex` 23 | * :ref:`modindex` 24 | * :ref:`search` 25 | -------------------------------------------------------------------------------- /tests/fixture/postgres/test_sqlalchemy2_base_class.py: -------------------------------------------------------------------------------- 1 | from tests import is_at_least_sqlalchemy2 2 | 3 | if is_at_least_sqlalchemy2: 4 | from sqlalchemy import Column, Integer 5 | from sqlalchemy.orm import DeclarativeBase 6 | 7 | from pytest_mock_resources import create_postgres_fixture 8 | 9 | class Base(DeclarativeBase): 10 | ... 11 | 12 | class Thing(Base): 13 | __tablename__ = "thing" 14 | 15 | id = Column(Integer, autoincrement=True, primary_key=True) 16 | 17 | pg = create_postgres_fixture(Base, session=True) 18 | 19 | def test_creates_ddl(pg): 20 | rows = pg.query(Thing).all() 21 | assert len(rows) == 0 22 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | API 2 | === 3 | 4 | Fixture Functions 5 | ----------------- 6 | 7 | .. automodule:: pytest_mock_resources 8 | :noindex: 9 | :members: create_mongo_fixture, create_mysql_fixture, create_postgres_fixture, create_redis_fixture, create_redshift_fixture, create_sqlite_fixture, Rows, Statements, StaticStatements 10 | 11 | 12 | .. automodule:: pytest_mock_resources.fixture.credentials 13 | :members: Credentials 14 | 15 | Fixture Config 16 | -------------- 17 | 18 | .. 
automodule:: pytest_mock_resources 19 | :noindex: 20 | :members: pmr_mongo_config, pmr_mysql_config, pmr_postgres_config, pmr_redis_config, MongoConfig, MysqlConfig, PostgresConfig, RedisConfig 21 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from pytest_mock_resources.compat import sqlalchemy 6 | 7 | is_at_least_sqlalchemy2 = sqlalchemy.version.startswith("2.") 8 | is_sqlalchemy2 = sqlalchemy.version.startswith("1.4") or sqlalchemy.version.startswith("2.") 9 | skip_if_sqlalchemy2 = pytest.mark.skipif( 10 | is_sqlalchemy2, 11 | reason="Incompatible with sqlalchemy 2 behavior", 12 | ) 13 | skip_if_not_sqlalchemy2 = pytest.mark.skipif( 14 | not is_sqlalchemy2, 15 | reason="Incompatible before sqlalchemy 2 behavior", 16 | ) 17 | 18 | skip_if_ci = pytest.mark.skipif( 19 | os.environ.get("CI") == "true", 20 | reason="Incompatible with CI behavior", 21 | ) 22 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/plugin.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import sys 3 | from typing import Iterable 4 | 5 | if sys.version_info < (3, 8): 6 | import importlib_metadata 7 | else: 8 | import importlib.metadata as importlib_metadata 9 | 10 | 11 | def find_entrypoints() -> Iterable[str]: 12 | modules = set() 13 | for dist in importlib_metadata.distributions(): 14 | for ep in dist.entry_points: 15 | if ep.group.lower() != "pmr": 16 | continue 17 | 18 | modules.add(ep.value) 19 | return sorted(modules) 20 | 21 | 22 | def load_entrypoints(modules: Iterable[str]): 23 | for module in modules: 24 | importlib.import_module(module) 25 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: 
-------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | Description of what the bug is. 12 | 13 | **Environment** 14 | * Host OS 15 | * Docker image if applicable 16 | * Python Version 17 | * Virtualenv/Pyenv etc.. if applicable 18 | 19 | **To Reproduce** 20 | Steps to reproduce the behavior: 21 | 1. Go to '...' 22 | 2. Run '....' 23 | 3. See error 24 | 25 | **Expected behavior** 26 | A clear and concise description of what you expected to happen. 27 | 28 | **Actual Behavior** 29 | Include the traceback! 30 | 31 | **Additional context** 32 | Add any other context about the problem here. 
33 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/container/__init__.py: -------------------------------------------------------------------------------- 1 | from pytest_mock_resources.container.base import get_container 2 | from pytest_mock_resources.container.mongo import MongoConfig 3 | from pytest_mock_resources.container.moto import MotoConfig 4 | from pytest_mock_resources.container.mysql import MysqlConfig 5 | from pytest_mock_resources.container.postgres import PostgresConfig 6 | from pytest_mock_resources.container.redis import RedisConfig 7 | from pytest_mock_resources.container.redshift import RedshiftConfig 8 | 9 | __all__ = [ 10 | "get_container", 11 | "MongoConfig", 12 | "MysqlConfig", 13 | "PostgresConfig", 14 | "PostgresConfig", 15 | "RedisConfig", 16 | "RedshiftConfig", 17 | "MotoConfig", 18 | ] 19 | -------------------------------------------------------------------------------- /docs/source/relational/index.rst: -------------------------------------------------------------------------------- 1 | Relational Database Fixtures 2 | ============================ 3 | 4 | All of the officially supported relational databases (SQLite, Postgres, Redshift, 5 | and MYSQL) support a minimum level of parity of features. Generally they will 6 | all have intercompatibile function signatures, except wherein there are particular 7 | features supported by a database which is unsupported in one of the others. 8 | 9 | For database-specific support, see the corresponding :ref:`redshift-label`, :ref:`Postgres`, or :ref:`SQLite` pages. 10 | 11 | 12 | .. 
toctree:: 13 | 14 | Basics 15 | Preset DDL/Data 16 | Manually Constructed Engines 17 | Template Databases 18 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = source 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 20 | 21 | livehtml: 22 | sphinx-autobuild -b html $(SOURCEDIR) $(SPHINXOPTS) $(BUILDDIR)/html 23 | -------------------------------------------------------------------------------- /docs/source/quickstart.rst: -------------------------------------------------------------------------------- 1 | Quickstart 2 | ========== 3 | 4 | Docker 5 | ------ 6 | 7 | In order to run tests which interact with **most** fixture types (sqlite being an example of one 8 | such exception). Docker needs to be available and running: 9 | 10 | Make sure you have docker installed: 11 | 12 | * MacOs_ 13 | * Nix_ 14 | * Windows_ 15 | 16 | 17 | Once you have docker installed, :code:`pytest` will automatically up and down any necessary docker 18 | containers so you don't have to, by default all containers will be spun up/down per :code:`pytest` 19 | invocation. 20 | 21 | 22 | .. mdinclude:: ../../README.md 23 | 24 | 25 | .. _MacOs: https://docs.docker.com/docker-for-mac/install/ 26 | .. _Nix: https://docs.docker.com/install/ 27 | .. 
_Windows: https://docs.docker.com/docker-for-windows/install/ 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Editors 2 | .idea/ 3 | .tags* 4 | *.iml 5 | *.iws 6 | *.sublime-project 7 | *.sublime-workspace 8 | *.sw? 9 | *~ 10 | .vscode 11 | Session.vim 12 | .ropeproject/ 13 | 14 | # IPython Notebook 15 | .ipynb_checkpoints 16 | 17 | # Mac 18 | .DS_Store 19 | Icon 20 | 21 | # Distribution / packaging 22 | .Python 23 | __pycache__/ 24 | *.py[cod] 25 | *$py.class 26 | *.egg 27 | *.egg-info/ 28 | eggs/ 29 | .eggs/ 30 | .mypy_cache/ 31 | dist/ 32 | build/ 33 | pip-wheel-metadata/ 34 | 35 | # Testing 36 | .coverage 37 | .cache 38 | coverage.xml 39 | .coverage.* 40 | *.cover 41 | junit_results.xml 42 | coverage/ 43 | .pytest_cache/ 44 | 45 | # Logging 46 | log/ 47 | 48 | # Environments 49 | .env 50 | .envrc 51 | .venv 52 | env/ 53 | venv/ 54 | ENV/ 55 | env.bak/ 56 | venv.bak/ 57 | .python-version 58 | 59 | # Documentation 60 | docs/_* 61 | -------------------------------------------------------------------------------- /tests/test_examples.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.redis 5 | def test_multiprocess_redis_database(pytester): 6 | pytester.copy_example() 7 | 8 | # The `-n 4` are here is tightly coupled with the implementation of `test_split.py`. 9 | args = ["-vv", "-n", "4", "--pmr-multiprocess-safe", "test_split.py"] 10 | result = pytester.inline_run(*args) 11 | result.assertoutcome(passed=4, skipped=0, failed=0) 12 | 13 | 14 | @pytest.mark.postgres 15 | def test_multiprocess_container_cleanup_race_condition(pytester): 16 | pytester.copy_example() 17 | 18 | # The `-n 2` are here is tightly coupled with the implementation of `test_split.py`. 
19 | args = ["-vv", "-n", "2", "--pmr-multiprocess-safe", "test_split.py"] 20 | result = pytester.inline_run(*args) 21 | result.assertoutcome(passed=2, skipped=0, failed=0) 22 | -------------------------------------------------------------------------------- /docs/source/postgres.rst: -------------------------------------------------------------------------------- 1 | Postgres 2 | ======== 3 | 4 | .. note:: 5 | 6 | The default postgres driver support is `psycopg2` for synchronous fixtures, 7 | and `asyncpg` for async fixtures. If you want to use a different driver, you 8 | can configure the `drivername` field using the `pmr_postgres_config` fixture: 9 | 10 | .. code-block:: python 11 | 12 | from pytest_mock_resources import PostgresConfig 13 | 14 | @pytest.fixture(scope='session') 15 | def pmr_postgres_config(): 16 | return PostgresConfig(drivername='postgresql+psycopg2') # but whatever driver you require. 17 | 18 | Note however, that the `asyncpg` driver **only** works with the async fixture, and the 19 | `psycopg2` driver **only** works with the synchronous fixture. These are inherent 20 | attributes of the drivers/support within SQLAlchemy. 
21 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Github Release/Publish PyPi 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*.*.*" 7 | 8 | jobs: 9 | gh-release: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v1 14 | - name: Release 15 | uses: softprops/action-gh-release@v1 16 | with: 17 | generate_release_notes: true 18 | 19 | publish-pypi: 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v3 23 | - uses: actions/setup-python@v4 24 | with: 25 | python-version: "3.9" 26 | - name: Run image 27 | uses: abatilo/actions-poetry@v2.0.0 28 | with: 29 | poetry-version: 1.5.1 30 | 31 | - name: Publish 32 | env: 33 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 34 | run: | 35 | poetry config pypi-token.pypi $PYPI_TOKEN 36 | poetry publish --build 37 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/compat/import_.py: -------------------------------------------------------------------------------- 1 | from types import ModuleType 2 | 3 | 4 | class ImportAdaptor(ModuleType): 5 | __wrapped__ = False 6 | 7 | def __init__(self, package, recommended_extra, fail_message=None, **attrs): 8 | self.package = package 9 | self.recommended_extra = recommended_extra 10 | self.fail_message = fail_message 11 | 12 | for key, value in attrs.items(): 13 | setattr(self, key, value) 14 | 15 | def fail(self): 16 | if self.fail_message: 17 | fail_message = self.fail_message 18 | else: 19 | fail_message = "Cannot use {recommended_extra} fixtures without {package}. 
pip install pytest-mock-resources[{recommended_extra}]".format( 20 | package=self.package, recommended_extra=self.recommended_extra 21 | ) 22 | 23 | raise RuntimeError(fail_message) 24 | 25 | def __getattr__(self, attr): 26 | self.fail() 27 | -------------------------------------------------------------------------------- /docs/source/docker_client.rst: -------------------------------------------------------------------------------- 1 | Docker/Podman/Nerdctl 2 | ===================== 3 | 4 | Docker-alike clients which are CLI-compatible with Docker, i.e. podman and nerdctl, 5 | can alternatively be configured to be used instead of docker. 6 | 7 | There are a number of ways to configure this setting, depending on the scenarios 8 | in which you expect the code to be used. 9 | 10 | Known-compatible string values for all settings options are: docker, podman, and nerdctl. 11 | 12 | * Environment variable `PMR_DOCKER_CLIENT=docker`: Use the environment variable option if 13 | the setting is environment-specific. 14 | 15 | * CLI options `pytest --pmr-docker-client docker`: Use this option for ad-hoc selection 16 | 17 | * pytest.ini setting `pmr_docker_client=docker`: Use this option to default all users to 18 | the selected value 19 | 20 | * Fallback: If none of the above options are set, each of the above options will be 21 | searched for, in order. The first option to be found will be used. 22 | 23 | Note, this fallback logic will be executed at most once per test run and cached. 24 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # 2 | # Configuration file for the Sphinx documentation builder. 3 | # 4 | # This file does only contain a selection of the most common options. 
For a 5 | # full list see the documentation: 6 | # http://www.sphinx-doc.org/en/master/config 7 | 8 | import os 9 | import sys 10 | 11 | sys.path.insert(0, os.path.abspath("..")) 12 | 13 | project = "Pytest Mock Resources" 14 | release = "1.2.2" 15 | version = "1.2.2" 16 | 17 | extensions = [ 18 | "m2r2", 19 | "sphinx.ext.autodoc", 20 | "sphinx_autodoc_typehints", 21 | "sphinx.ext.autosectionlabel", 22 | "sphinx.ext.intersphinx", 23 | "sphinx.ext.napoleon", 24 | ] 25 | 26 | templates_path = ["_templates"] 27 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 28 | source_suffix = [".rst", ".md"] 29 | 30 | html_theme = "sphinx_rtd_theme" 31 | html_static_path = ["_static"] 32 | html_sidebars = {"**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]} 33 | 34 | intersphinx_mapping = {"https://docs.python.org/": None} 35 | 36 | autoclass_content = "both" 37 | master_doc = "index" 38 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/base.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | from typing import Union 3 | 4 | import pytest 5 | from typing_extensions import Literal 6 | 7 | 8 | def generate_fixture_id(enabled: bool = True, name=""): 9 | if enabled: 10 | uuid_str = str(uuid.uuid4()).replace("-", "_") 11 | return "_".join(["pmr_template", name, uuid_str]) 12 | return None 13 | 14 | 15 | def asyncio_fixture(async_fixture, scope="function"): 16 | # pytest-asyncio in versions >=0.17 force you to use a `pytest_asyncio.fixture` 17 | # call instead of `pytest.fixture`. Given that this would introduce an unnecessary 18 | # dependency on pytest-asyncio (when there are other alternatives) seems less than 19 | # ideal, so instead we can just set the flag that they set, as the indicator. 
20 | async_fixture._force_asyncio_fixture = True 21 | 22 | fixture = pytest.fixture(scope=scope) 23 | return fixture(async_fixture) 24 | 25 | 26 | Scope = Union[ 27 | Literal["session"], 28 | Literal["package"], 29 | Literal["module"], 30 | Literal["class"], 31 | Literal["function"], 32 | ] 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Known LLC 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 14 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 15 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 16 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 17 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 18 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE 19 | OR OTHER DEALINGS IN THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /tests/fixture/non_session_container/test_static_port.py: -------------------------------------------------------------------------------- 1 | """Test the ability for non-session container fixtures to be overridden. 
2 | 3 | While these tests can execute in CI, as-is, they won't test fixture 4 | teardown of containers in CI (where the container is pre-allocated by 5 | CI). Perhaps something can be worked out using a dind kind of setup. 6 | """ 7 | import pytest 8 | from sqlalchemy import text 9 | 10 | from pytest_mock_resources import create_postgres_fixture, get_container, PostgresConfig 11 | 12 | 13 | @pytest.fixture 14 | def pmr_postgres_container(pytestconfig, pmr_postgres_config: PostgresConfig): 15 | yield from get_container(pytestconfig, pmr_postgres_config) 16 | 17 | 18 | pg = create_postgres_fixture(session=True) 19 | 20 | 21 | class Test_postgres_fixture: 22 | """Test the postgres fixture. 23 | 24 | We need at least 2 (or more) tests to verify that the fixtures are not 25 | clobbering one another. 26 | """ 27 | 28 | def test_one(self, pg): 29 | pg.execute(text("select 1")) 30 | 31 | def test_two(self, pg): 32 | pg.execute(text("select 1")) 33 | 34 | def test_three(self, pg): 35 | pg.execute(text("select 1")) 36 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: lock install-base install test-base test-parallel test lint format build-package build-docs build publish 2 | .DEFAULT_GOAL := help 3 | 4 | # Install 5 | lock: 6 | poetry lock 7 | 8 | install-base: 9 | poetry install 10 | 11 | install: 12 | poetry install -E postgres -E postgres-async -E redshift -E mongo -E redis -E mysql -E moto 13 | 14 | ## Test 15 | test-base: 16 | SQLALCHEMY_WARN_20=1 coverage run -a -m \ 17 | pytest src tests -vv \ 18 | -m 'not postgres and not redshift and not mongo and not redis and not mysql and not moto' 19 | 20 | test-parallel: 21 | SQLALCHEMY_WARN_20=1 coverage run -m pytest -n 4 src tests -vv --pmr-multiprocess-safe 22 | 23 | test: test-parallel 24 | SQLALCHEMY_WARN_20=1 coverage run -a -m pytest src tests -vv 25 | coverage report 26 | coverage xml 
27 | 28 | ## Lint 29 | lint: 30 | ruff --fix src tests || exit 1 31 | ruff format -q src tests || exit 1 32 | mypy src tests --show-error-codes || exit 1 33 | 34 | format: 35 | ruff src tests --fix 36 | ruff format src tests 37 | 38 | ## Build 39 | build-package: 40 | poetry build 41 | 42 | build-docs: 43 | pip install -r docs/requirements.txt 44 | make -C docs html 45 | 46 | build: build-package 47 | 48 | publish: build 49 | poetry publish -u __token__ -p '${PYPI_PASSWORD}' --no-interaction 50 | -------------------------------------------------------------------------------- /tests/fixture/non_session_container/test_dynamic_port.py: -------------------------------------------------------------------------------- 1 | """Test the ability for non-session container fixtures to dynamically 2 | 3 | While these tests can execute in CI, as-is, they won't test fixture 4 | teardown of containers in CI (where the container is pre-allocated by 5 | CI). Perhaps something can be worked out using a dind kind of setup. 6 | """ 7 | import pytest 8 | from sqlalchemy import text 9 | 10 | from pytest_mock_resources import create_postgres_fixture, get_container, PostgresConfig 11 | from tests import skip_if_ci 12 | 13 | 14 | @pytest.fixture(scope="session") 15 | def pmr_postgres_config(): 16 | return PostgresConfig(port=None) 17 | 18 | 19 | @pytest.fixture 20 | def pmr_postgres_container(pytestconfig, pmr_postgres_config: PostgresConfig): 21 | yield from get_container(pytestconfig, pmr_postgres_config) 22 | 23 | 24 | pg = create_postgres_fixture(session=True) 25 | 26 | 27 | @skip_if_ci 28 | class Test_postgres_fixture: 29 | """Test the postgres fixture. 30 | 31 | We need at least 2 (or more) tests to verify that the fixtures are not 32 | clobbering one another. 
33 | """ 34 | 35 | def test_one(self, pg): 36 | pg.execute(text("select 1")) 37 | 38 | def test_two(self, pg): 39 | pg.execute(text("select 1")) 40 | 41 | def test_three(self, pg): 42 | pg.execute(text("select 1")) 43 | -------------------------------------------------------------------------------- /tests/examples/test_multiprocess_redis_database/test_split.py: -------------------------------------------------------------------------------- 1 | """Produce a test example which would induce a redis database related race condition. 2 | 3 | The premise is that given a pytest invocation: `pytest -n 4 test_split.py`, 4 | multiple processes would start up simultaneously, and all connecting to the same 5 | redis database. 6 | 7 | The tests would then proceed to clobber one another's values, leading to flaky 8 | tests. 9 | 10 | A correct implementation would use some mechanism to avoid this inter-parallel-test 11 | key conflict problem. 12 | """ 13 | import random 14 | import time 15 | 16 | 17 | def test_node_one(redis, pytestconfig): 18 | run_test(redis, pytestconfig) 19 | 20 | 21 | def test_node_two(redis, pytestconfig): 22 | run_test(redis, pytestconfig) 23 | 24 | 25 | def test_node_three(redis, pytestconfig): 26 | run_test(redis, pytestconfig) 27 | 28 | 29 | def test_node_four(redis, pytestconfig): 30 | run_test(redis, pytestconfig) 31 | 32 | 33 | def run_test(redis, pytestconfig): 34 | worker_id = int(pytestconfig.workerinput["workerid"][2:]) 35 | database = redis.connection_pool.get_connection("set").db 36 | assert worker_id == database 37 | 38 | redis.set("foo", "bar") 39 | time.sleep(random.randrange(1, 10) / 10) 40 | value = redis.get("foo") 41 | 42 | assert value == b"bar" 43 | 44 | redis.flushdb() 45 | -------------------------------------------------------------------------------- /docs/source/redshift.rst: -------------------------------------------------------------------------------- 1 | .. toctree:: 2 | 3 | .. 
The :code:`redshift` fixture should automatically instrument direct calls to
:func:`psycopg2.connect` (or :func:`sqlalchemy.create_engine`).
class AbstractAction(metaclass=abc.ABCMeta):
    """Base class for declarative actions applied to a fixture at setup time.

    Subclasses declare which fixture kinds they support via ``fixtures``.
    """

    # Names of the fixture kinds (e.g. "postgres") this action supports.
    fixtures: ClassVar[tuple[str, ...]] = ()
    # Whether the action is safe to apply to a statically shared resource.
    static_safe: ClassVar[bool] = False

    @abc.abstractmethod
    def apply(self, conn):
        """Execute an action against the provided fixture connection."""


def validate_actions(actions, *, fixture: str | None, additional_types: Iterable = ()):
    """Validate each action is of a supported type for the given fixture kind.

    Args:
        actions: the ordered actions passed to a fixture factory.
        fixture: the fixture kind (e.g. "postgres"), or None when not applicable.
        additional_types: extra (non-``AbstractAction``) types the factory accepts.

    Raises:
        ValueError: when an action is of an unsupported type, or does not
            support the fixture kind it is being used with.
    """
    # Materialize once: a one-shot iterable would otherwise be exhausted
    # after the first loop iteration, breaking validation of later actions.
    additional_types = tuple(additional_types)
    for action in actions:
        if not isinstance(action, (AbstractAction, *additional_types)):
            extra_types_str = ", ".join(
                ["`" + ".".join([cls.__module__, cls.__name__]) + "`" for cls in additional_types]
            )
            # Join with "or" only when there are additional types, to avoid a
            # dangling ", or" in the message when `additional_types` is empty.
            accepted = " or ".join(
                part for part in (extra_types_str, "`AbstractAction` subclasses") if part
            )
            raise ValueError(
                f"`{action}` invalid: create_{fixture}_fixture function accepts "
                f"{accepted} as inputs."
            )

        if fixture and isinstance(action, AbstractAction) and fixture not in action.fixtures:
            supported_fixtures = ", ".join([f"`create_{f}_fixture`" for f in action.fixtures])
            raise ValueError(
                f"`{action}` invalid: `{action.__class__}` is being used with `create_{fixture}_fixture` "
                f"but only supports: {supported_fixtures}."
            )
A new resource (database or otherwise) is created on a per-test basis, which allows each
fixture to be used in multiple tests without risking data leakage or side-effects from one
test to another.
def extract_model_base_metadata(base) -> Optional[sqlalchemy.MetaData]:
    """Return the declarative base's ``metadata``, or ``None`` if absent/invalid.

    Anything other than a genuine :class:`MetaData` instance (including a
    missing attribute) yields ``None``.
    """
    candidate = getattr(base, "metadata", None)
    return candidate if isinstance(candidate, MetaData) else None
class MotoConfig(DockerContainerConfig):
    """Define the configuration object for moto.

    Args:
        image (str): The docker image:tag specifier to use for moto containers.
            Defaults to :code:`"motoserver/moto:4.0.6"`.
        host (str): The hostname under which a mounted port will be available.
            Defaults to :code:`"localhost"`.
        port (int): The port to bind the container to.
            Defaults to :code:`5555`.
    """

    name = "moto"

    # Note: unlike most other configs, there is no `ci_port` field.
    _fields: ClassVar[Iterable] = {"image", "host", "port"}
    _fields_defaults: ClassVar[dict] = {
        "image": "motoserver/moto:4.0.6",
        "port": 5555,
    }

    def ports(self):
        # moto serves on 5000 inside the container; expose it on the
        # configured host port.
        return {5000: self.port}

    def check_fn(self):
        """Raise ContainerCheckFailed unless the moto server answers HTTP."""
        import requests

        try:
            url = endpoint_url(self)
            requests.get(url, timeout=60)
        except requests.exceptions.RequestException:
            raise ContainerCheckFailed(
                f"Unable to connect to a presumed moto test container via given config: {self}"
            )


def endpoint_url(config):
    """Build the HTTP endpoint URL for the moto server described by `config`."""
    return f"http://{config.host}:{config.port}"
class RedshiftConfig(PostgresConfig):
    """Define the configuration object for Redshift.

    Args:
        image (str): The docker image:tag specifier to use for Redshift containers.
            Defaults to :code:`"postgres:9.6.10-alpine"`.
        host (str): The hostname under which a mounted port will be available.
            Defaults to :code:`"localhost"`.
        port (int): The port to bind the container to.
            Defaults to :code:`5532`.
        ci_port (int): The port to bind the container to when a CI environment is detected.
            Defaults to :code:`5432`.
        username (str): The username of the root Redshift user
            Defaults to :code:`"user"`.
        password (str): The password of the root Redshift password
            Defaults to :code:`"password"`.
        root_database (str): The name of the root Redshift database to create.
            Defaults to :code:`"dev"`.
        drivername (str): The sqlalchemy driver to use
            Defaults to :code:`"postgresql+psycopg2"`.
    """

    name = "redshift"
    # Redshift is emulated on top of a plain postgres image; the field set
    # matches PostgresConfig, only the defaults below differ.
    _fields: ClassVar[Iterable] = {
        "image",
        "host",
        "port",
        "ci_port",
        "username",
        "password",
        "root_database",
        "drivername",
    }
    _fields_defaults: ClassVar[dict] = {
        "image": "postgres:9.6.10-alpine",
        "port": 5532,
        "ci_port": 5432,
        "username": "user",
        "password": "password",
        "root_database": "dev",
        # NOTE(review): None here, despite the docstring's stated default —
        # presumably defers to PostgresConfig's drivername fallback; confirm.
        "drivername": None,
    }
"""Re-export the public fixture factories and their default pmr_* fixtures.

Each resource kind contributes a ``create_<kind>_fixture`` factory plus the
``pmr_<kind>_config``/``pmr_<kind>_container`` fixtures it depends on.
"""
from pytest_mock_resources.fixture.mongo import (
    create_mongo_fixture,
    pmr_mongo_config,
    pmr_mongo_container,
)
from pytest_mock_resources.fixture.moto import (
    create_moto_fixture,
    pmr_moto_config,
    pmr_moto_container,
    S3Bucket,
    S3Object,
)
from pytest_mock_resources.fixture.mysql import (
    create_mysql_fixture,
    pmr_mysql_config,
    pmr_mysql_container,
)
from pytest_mock_resources.fixture.postgresql import (
    create_postgres_config_fixture,
    create_postgres_container_fixture,
    create_postgres_fixture,
    pmr_postgres_config,
    pmr_postgres_container,
)
from pytest_mock_resources.fixture.redis import (
    create_redis_fixture,
    pmr_redis_config,
    pmr_redis_container,
)
from pytest_mock_resources.fixture.redshift import (
    create_redshift_fixture,
    pmr_redshift_config,
    pmr_redshift_container,
)
from pytest_mock_resources.fixture.sqlite import create_sqlite_fixture

# The package's public surface; anything not listed here is internal.
__all__ = [
    "S3Bucket",
    "S3Object",
    "create_mongo_fixture",
    "create_moto_fixture",
    "create_mysql_fixture",
    "create_postgres_fixture",
    "create_postgres_config_fixture",
    "create_postgres_container_fixture",
    "create_redis_fixture",
    "create_redshift_fixture",
    "create_sqlite_fixture",
    "pmr_mongo_config",
    "pmr_mongo_container",
    "pmr_moto_config",
    "pmr_moto_container",
    "pmr_mysql_config",
    "pmr_mysql_container",
    "pmr_postgres_config",
    "pmr_postgres_container",
    "pmr_redis_config",
    "pmr_redis_container",
    "pmr_redshift_config",
    "pmr_redshift_container",
]
class MongoConfig(DockerContainerConfig):
    """Define the configuration object for mongo.

    Args:
        image (str): The docker image:tag specifier to use for mongo containers.
            Defaults to :code:`"mongo:3.6"`.
        host (str): The hostname under which a mounted port will be available.
            Defaults to :code:`"localhost"`.
        port (int): The port to bind the container to.
            Defaults to :code:`28017`.
        ci_port (int): The port to bind the container to when a CI environment is detected.
            Defaults to :code:`27017`.
        root_database (str): The name of the root mongo database to create.
            Defaults to :code:`"dev-mongo"`.
    """

    name = "mongo"

    _fields: ClassVar[Iterable] = {"image", "host", "port", "ci_port", "root_database"}
    _fields_defaults: ClassVar[dict] = {
        "image": "mongo:3.6",
        "port": 28017,
        "ci_port": 27017,
        "root_database": "dev-mongo",
    }

    @fallback
    def root_database(self):
        raise NotImplementedError()

    def ports(self):
        # 27017 is mongo's in-container port; map it to the configured host port.
        return {27017: self.port}

    def check_fn(self):
        """Raise ContainerCheckFailed unless a mongo server answers `ismaster`."""
        client = None
        try:
            client = pymongo.MongoClient(self.host, self.port, timeoutMS=1000)
            db = client[self.root_database]
            db.command("ismaster")
        except pymongo.errors.PyMongoError:
            raise ContainerCheckFailed(
                f"Unable to connect to a presumed MongoDB test container via given config: {self}"
            )
        finally:
            # Close the client: the original leaked one connection pool per
            # liveness check, and this check can be retried many times.
            if client is not None:
                client.close()
22 | """ 23 | 24 | name = "redis" 25 | 26 | _fields: ClassVar[Iterable] = { 27 | "image", 28 | "host", 29 | "port", 30 | "ci_port", 31 | "decode_responses", 32 | } 33 | _fields_defaults: ClassVar[dict] = { 34 | "image": "redis:5.0.7", 35 | "port": 6380, 36 | "ci_port": 6379, 37 | "decode_responses": False, 38 | } 39 | 40 | @fallback 41 | def decode_responses(self): 42 | raise NotImplementedError() 43 | 44 | def ports(self): 45 | return {6379: self.port} 46 | 47 | def check_fn(self): 48 | try: 49 | client = redis.Redis(host=self.host, port=self.port) 50 | client.ping() 51 | except redis.ConnectionError: 52 | raise ContainerCheckFailed( 53 | f"Unable to connect to a presumed Redis test container via given config: {self}" 54 | ) 55 | -------------------------------------------------------------------------------- /tests/examples/test_multiprocess_container_cleanup_race_condition/test_split.py: -------------------------------------------------------------------------------- 1 | """Produce a test example which would induce a container cleanup race condition. 2 | 3 | The premise is that given a pytest invocation: `pytest -n 2 test_split.py`, 4 | the xdist implementation would collect the tests (in this case evenly among 5 | the two workers), fork the process, produce all the fixtures, run the tests, 6 | run fixture cleanup, complete. 7 | 8 | Specifically the "run fixture cleanup" step is the potential problem. One 9 | process will have "won" in the initial race to create the container, and 10 | will have a local reference to the created `container` object. 11 | 12 | So when it goes to try to clean up its container, any remaining tests in other 13 | workers may still be attempting to use the container and will fail. 14 | 15 | Often this **doesn't** happen because cleanup of session fixtures is the last 16 | thing to happen. Perhaps the first worker which produces the container is also 17 | the one which is most likely to complete last. 
import io

from pytest_mock_resources import create_moto_fixture, S3Bucket, S3Object

# A single declared bucket, shared by both fixtures below; state is NOT shared
# between fixtures (see test_separate_fixture_state).
bucket = S3Bucket("foo")
moto = create_moto_fixture(
    bucket,
    # manual-construction style
    S3Object("foo", "a/b/c.txt", "hello!"),
    # fluent-method style
    bucket.object("b/text.txt", "This is text"),
    bucket.object("c/textio.txt", io.StringIO("This is textio")),
    bucket.object("d/bytes.txt", b"This is bytes"),
    bucket.object("e/binaryio.txt", io.BytesIO(b"This is binaryio")),
)


def test_produce_objects(moto):
    """Every declared object (str, StringIO, bytes, BytesIO) is uploaded as bytes."""
    resource = moto.resource("s3")
    objects = sorted(resource.Bucket("foo").objects.all(), key=lambda o: o.key)
    assert len(objects) == 5

    assert objects[0].key == "a/b/c.txt"
    assert objects[0].get()["Body"].read() == b"hello!"

    assert objects[1].key == "b/text.txt"
    assert objects[1].get()["Body"].read() == b"This is text"

    assert objects[2].key == "c/textio.txt"
    assert objects[2].get()["Body"].read() == b"This is textio"

    assert objects[3].key == "d/bytes.txt"
    assert objects[3].get()["Body"].read() == b"This is bytes"

    assert objects[4].key == "e/binaryio.txt"
    assert objects[4].get()["Body"].read() == b"This is binaryio"


def test_2nd_test_using_same_fixture(moto):
    """Asserts you get the same fixture state in two tests."""
    resource = moto.resource("s3")
    objects = list(resource.Bucket("foo").objects.all())
    assert len(objects) == 5


# A second fixture over the same bucket, but with no objects declared.
moto2 = create_moto_fixture(bucket)


def test_separate_fixture_state(moto2):
    """Asserts that two tests referencing the same bucket do not share state."""
    resource = moto2.resource("s3")
    objects = sorted(resource.Bucket("foo").objects.all(), key=lambda o: o.key)
    assert len(objects) == 0
What's happening under the hood is that a docker container (except
with SQLite) is being spun up on a per-test-session basis, and then
individual sub-container databases are being created on a per-test basis,
and yielded to each test.
def test_statements_postgres(postgres):
    """The views created via ``Statements`` must exist in the postgres database."""
    query = text(
        """
        SELECT table_name
        FROM INFORMATION_SCHEMA.views
        WHERE table_name in ('cool_view', 'cool_view_2')
        ORDER BY table_name
        """
    )
    with postgres.connect() as conn:
        view_names = [row[0] for row in conn.execute(query)]

    assert ["cool_view", "cool_view_2"] == view_names


def test_statements_mysql(mysql):
    """The views created via ``Statements`` must exist in the mysql database."""
    query = text(
        """
        SELECT table_name
        FROM INFORMATION_SCHEMA.views
        WHERE table_name in ('cool_view', 'cool_view_2')
        AND table_schema = (select database())
        ORDER BY table_name
        """
    )
    with mysql.connect() as conn:
        view_names = [row[0] for row in conn.execute(query)]

    assert ["cool_view", "cool_view_2"] == view_names
# Optional dependency: psycopg2 backs the postgres/redshift fixtures.
# When it is absent, substitute an ImportAdaptor so that importing this
# module still succeeds and a helpful install hint is raised only on use.
try:
    import psycopg2
except ImportError:
    # Fix: the extras hint previously read "pytest-mock-resources[[postgres-binary]."
    # (doubled "[" and missing closing "]"), which is not a valid pip extras spec.
    fail_message = (
        "Cannot use postgres/redshift fixtures without psycopg2.\n"
        "pip install pytest-mock-resources[postgres] or pytest-mock-resources[postgres-binary].\n"
        "Additionally, pip install pytest-mock-resources[redshift] for redshift fixtures."
    )
    psycopg2 = ImportAdaptor(
        "psycopg2",
        "postgres",
        fail_message=fail_message,
        # Mirror the psycopg2.extensions.cursor attribute path used by the
        # redshift patching code, so attribute access fails with the same hint.
        extensions=ImportAdaptor(
            "psycopg2", "psycopg2", fail_message=fail_message, cursor=ImportAdaptor
        ),
    )
| 67 | try: 68 | import pymysql 69 | except ImportError: 70 | pymysql = ImportAdaptor("pymysql", "mysql") 71 | 72 | 73 | __all__ = [ 74 | "sqlalchemy", 75 | "psycopg2", 76 | "asyncpg", 77 | "boto3", 78 | "moto", 79 | "sqlparse", 80 | "pymongo", 81 | "redis", 82 | "pymysql", 83 | ] 84 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/moto/action.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import io 4 | from dataclasses import dataclass 5 | from typing import BinaryIO, ClassVar, Iterable, TextIO, TYPE_CHECKING, Union 6 | 7 | from pytest_mock_resources.action import AbstractAction, validate_actions 8 | 9 | if TYPE_CHECKING: 10 | from pytest_mock_resources.fixture.moto.base import Session 11 | 12 | 13 | ObjectContent = Union[bytes, str, BinaryIO, TextIO] 14 | 15 | 16 | @dataclass 17 | class S3Bucket(AbstractAction): 18 | fixtures: ClassVar[tuple[str, ...]] = ("moto",) 19 | static_safe: ClassVar[bool] = True 20 | 21 | name: str 22 | 23 | def object(self, key: str, data: ObjectContent): 24 | return S3Object(self, key, data) 25 | 26 | def apply(self, session: Session): 27 | client = session.resource("s3") 28 | client.create_bucket(Bucket=self.name) 29 | 30 | 31 | @dataclass 32 | class S3Object(AbstractAction): 33 | fixtures: ClassVar[tuple[str, ...]] = ("moto",) 34 | static_safe: ClassVar[bool] = True 35 | 36 | bucket: str | S3Bucket 37 | key: str 38 | data: ObjectContent 39 | encoding: str = "utf-8" 40 | 41 | def apply(self, session: Session): 42 | resource = session.resource("s3") 43 | bucket_name = self.bucket.name if isinstance(self.bucket, S3Bucket) else self.bucket 44 | 45 | if isinstance(self.data, str): 46 | data = io.BytesIO(self.data.encode(self.encoding)) 47 | elif isinstance(self.data, bytes): 48 | data = io.BytesIO(self.data) 49 | elif isinstance(self.data, io.StringIO): 50 | data = 
io.BytesIO(self.data.getvalue().encode(self.encoding)) 51 | elif isinstance(self.data, io.BytesIO): 52 | data = io.BytesIO(self.data.getbuffer().tobytes()) 53 | else: 54 | raise NotImplementedError() 55 | 56 | resource.Object(bucket_name=bucket_name, key=self.key).upload_fileobj(data) 57 | 58 | 59 | MotoAction = Union[S3Bucket, S3Object] 60 | 61 | 62 | def apply_ordered_actions(session: Session, ordered_actions: Iterable[MotoAction]): 63 | validate_actions(ordered_actions, fixture="moto") 64 | for action in ordered_actions: 65 | action.apply(session) 66 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/__init__.py: -------------------------------------------------------------------------------- 1 | from pytest_mock_resources.container import ( 2 | get_container, 3 | MongoConfig, 4 | MotoConfig, 5 | MysqlConfig, 6 | PostgresConfig, 7 | RedisConfig, 8 | RedshiftConfig, 9 | ) 10 | from pytest_mock_resources.credentials import Credentials 11 | from pytest_mock_resources.fixture import ( 12 | create_mongo_fixture, 13 | create_moto_fixture, 14 | create_mysql_fixture, 15 | create_postgres_config_fixture, 16 | create_postgres_container_fixture, 17 | create_postgres_fixture, 18 | create_redis_fixture, 19 | create_redshift_fixture, 20 | create_sqlite_fixture, 21 | pmr_mongo_config, 22 | pmr_mongo_container, 23 | pmr_moto_config, 24 | pmr_moto_container, 25 | pmr_mysql_config, 26 | pmr_mysql_container, 27 | pmr_postgres_config, 28 | pmr_postgres_container, 29 | pmr_redis_config, 30 | pmr_redis_container, 31 | pmr_redshift_config, 32 | pmr_redshift_container, 33 | S3Bucket, 34 | S3Object, 35 | ) 36 | from pytest_mock_resources.hooks import ( # noqa 37 | pytest_addoption, 38 | pytest_configure, 39 | pytest_itemcollected, 40 | pytest_sessionfinish, 41 | ) 42 | from pytest_mock_resources.sqlalchemy import Rows, Statements, StaticStatements 43 | 44 | __all__ = [ 45 | "Credentials", 46 | "MongoConfig", 47 | "MotoConfig", 
48 | "MysqlConfig", 49 | "PostgresConfig", 50 | "RedisConfig", 51 | "RedshiftConfig", 52 | "Rows", 53 | "Statements", 54 | "StaticStatements", 55 | "S3Bucket", 56 | "S3Object", 57 | "create_mongo_fixture", 58 | "create_moto_fixture", 59 | "create_mysql_fixture", 60 | "create_postgres_fixture", 61 | "create_postgres_config_fixture", 62 | "create_postgres_container_fixture", 63 | "create_redis_fixture", 64 | "create_redshift_fixture", 65 | "create_sqlite_fixture", 66 | "get_container", 67 | "pmr_mongo_config", 68 | "pmr_mongo_container", 69 | "pmr_moto_config", 70 | "pmr_moto_container", 71 | "pmr_mysql_config", 72 | "pmr_mysql_container", 73 | "pmr_postgres_config", 74 | "pmr_postgres_container", 75 | "pmr_redis_config", 76 | "pmr_redis_container", 77 | "pmr_redshift_config", 78 | "pmr_redshift_container", 79 | ] 80 | -------------------------------------------------------------------------------- /docs/source/cli.rst: -------------------------------------------------------------------------------- 1 | CLI 2 | === 3 | 4 | As you start writing tests, you might notice that there's a small delay after 5 | invoking the tests before they execute. Which can get particularly annoying 6 | if you're only running a small subset of your tests at a time. This delay is 7 | because the default execution mode will kill all the containers that were 8 | started by the test suite each time it's executed. 9 | 10 | However, some containers have a larger startup cost than others; Mongo and presto, 11 | in particular have significant startup costs up to 30s! Even Postgres, 12 | has ~ 1-2 seconds startup time; which you'll pay each time you invoke :code:`pytest`. 13 | 14 | Pytest Mock Resources ships with a small CLI utility ``pmr``, which can be 15 | used to help amortize the cost of container startup between test runs. With it, 16 | you can pre-create the container against which the tests will connect. 17 | 18 | For Redshift and Postgres: 19 | 20 | .. 
code-block:: bash 21 | 22 | $ pmr postgres 23 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 24 | # or specify the image 25 | PMR_POSTGRES_IMAGE=postgres:11 pmr postgres 26 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 27 | 28 | For Mongo: 29 | 30 | .. code-block:: bash 31 | 32 | $ pmr mongo 33 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 34 | # or specify the image 35 | PMR_MONGO_IMAGE=mongo:5.0 pmr mongo 36 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 37 | 38 | For MySQL: 39 | 40 | .. code-block:: bash 41 | 42 | $ pmr mysql 43 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 44 | # or specify the image 45 | PMR_MYSQL_IMAGE=mysql:8.0 pmr mysql 46 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 47 | 48 | You can check on the instance's state via: 49 | 50 | .. code-block:: bash 51 | 52 | $ docker ps 53 | CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 54 | 711f5d5a8689 postgres:9.6.10-alpine "docker-entrypoint.s…" 16 seconds ago Up 15 seconds 0.0.0.0:5532->5432/tcp determined_euclid 55 | 56 | You can terminate the instance whenever you want via: 57 | 58 | ..
code-block:: bash 59 | 60 | $ pmr --stop postgres 61 | 711f5d5a86896bb4eb76813af4fb6616aee0eff817cdec6ebaf4daa0e9995441 62 | 63 | -------------------------------------------------------------------------------- /tests/fixture/test_rows.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, Integer, SmallInteger, text 2 | 3 | from pytest_mock_resources import create_mysql_fixture, create_postgres_fixture, Rows 4 | from pytest_mock_resources.compat.sqlalchemy import declarative_base 5 | 6 | Base = declarative_base() 7 | 8 | 9 | class Quarter(Base): 10 | __tablename__ = "quarter" 11 | 12 | id = Column(Integer, primary_key=True) 13 | year = Column(SmallInteger, nullable=False) 14 | quarter = Column(SmallInteger, nullable=False) 15 | 16 | 17 | rows = Rows( 18 | Quarter(id=1, year=2012, quarter=1), 19 | Quarter(id=2, year=2012, quarter=2), 20 | Quarter(id=3, year=2012, quarter=3), 21 | Quarter(id=4, year=2012, quarter=4), 22 | ) 23 | 24 | postgres = create_postgres_fixture(rows) 25 | mysql = create_mysql_fixture(rows) 26 | 27 | 28 | def test_rows_postgres(postgres): 29 | with postgres.begin() as conn: 30 | execute = conn.execute( 31 | text( 32 | """ 33 | SELECT * 34 | FROM quarter 35 | ORDER BY id 36 | """ 37 | ) 38 | ) 39 | assert [(1, 2012, 1), (2, 2012, 2), (3, 2012, 3), (4, 2012, 4)] == list(execute) 40 | 41 | 42 | def test_rows_mysql(mysql): 43 | with mysql.begin() as conn: 44 | execute = conn.execute(text("SELECT * FROM quarter ORDER BY id")) 45 | assert [(1, 2012, 1), (2, 2012, 2), (3, 2012, 3), (4, 2012, 4)] == list(execute) 46 | 47 | 48 | SecondBase = declarative_base() 49 | 50 | 51 | class Report(SecondBase): 52 | __tablename__ = "report" 53 | 54 | id = Column(Integer, primary_key=True) 55 | 56 | 57 | rows = Rows(Quarter(id=1, year=2012, quarter=1), Quarter(id=2, year=2012, quarter=2), Report(id=3)) 58 | base_2_postgres = create_postgres_fixture(rows, session=True) 59 | base_2_mysql = 
create_mysql_fixture(rows, session=True) 60 | 61 | 62 | def test_2_bases_postgres(base_2_postgres): 63 | execute = base_2_postgres.execute(text("SELECT * FROM quarter ORDER BY id")) 64 | assert [(1, 2012, 1), (2, 2012, 2)] == list(execute) 65 | 66 | execute = base_2_postgres.execute(text("SELECT * FROM report")) 67 | assert [(3,)] == list(execute) 68 | 69 | 70 | def test_2_bases_mysql(base_2_mysql): 71 | execute = base_2_mysql.execute(text("SELECT * FROM quarter ORDER BY id")) 72 | assert [(1, 2012, 1), (2, 2012, 2)] == list(execute) 73 | 74 | execute = base_2_mysql.execute(text("SELECT * FROM report")) 75 | assert [(3,)] == list(execute) 76 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | from pytest_mock_resources.config import DockerContainerConfig, fallback, get_env_config 4 | 5 | _DOCKER_HOST = "host.docker.internal" 6 | 7 | 8 | class Test_get_env_config: 9 | def test_it_is_missing_env_var(self): 10 | with mock.patch("os.environ", {}): 11 | value = get_env_config("postgres", "username") 12 | assert value is None 13 | 14 | def test_it_has_the_expected_env_var(self): 15 | with mock.patch("os.environ", {"PMR_POSTGRES_USERNAME": "foobar"}): 16 | value = get_env_config("postgres", "username") 17 | assert value == "foobar" 18 | 19 | 20 | class FooConfig(DockerContainerConfig): 21 | name = "foo" 22 | _fields = ("image", "host", "port", "ci_port", "extra_config", "no_value_default") 23 | _fields_defaults = {"image": "test", "extra_config": "bar", "port": 555} 24 | 25 | @fallback 26 | def extra_config(self): 27 | raise NotImplementedError() 28 | 29 | @fallback 30 | def no_value_default(self): 31 | raise NotImplementedError() 32 | 33 | 34 | class Test_FooConfig: 35 | def test_it_instantiates(self): 36 | foo = FooConfig() 37 | assert foo.image == "test" 38 | assert foo.extra_config == "bar" 39 | 
assert foo.no_value_default is None 40 | 41 | def test_gets_values_from_kwargs(self): 42 | foo = FooConfig(image="bar", extra_config="meow", no_value_default=4) 43 | assert foo.image == "bar" 44 | assert foo.extra_config == "meow" 45 | assert foo.no_value_default == 4 46 | 47 | def test_gets_values_from_env(self): 48 | with mock.patch( 49 | "os.environ", 50 | { 51 | "PMR_FOO_IMAGE": "bar", 52 | "PMR_FOO_EXTRA_CONFIG": "meow", 53 | "PMR_FOO_NO_VALUE_DEFAULT": "4", 54 | }, 55 | ): 56 | foo = FooConfig() 57 | 58 | assert foo.image == "bar" 59 | assert foo.extra_config == "meow" 60 | assert foo.no_value_default == "4" 61 | 62 | def test_repr(self): 63 | with mock.patch( 64 | "os.environ", 65 | { 66 | "PMR_FOO_EXTRA_CONFIG": "bar", 67 | }, 68 | ): 69 | foo = FooConfig(image="foo") 70 | result = repr(foo) 71 | assert ( 72 | result 73 | == "FooConfig(image='foo', host='localhost', port=555, ci_port=None, extra_config='bar', no_value_default=None)" 74 | ) 75 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/mongo.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pytest_mock_resources.compat import pymongo 4 | from pytest_mock_resources.container.base import get_container 5 | from pytest_mock_resources.container.mongo import MongoConfig 6 | from pytest_mock_resources.credentials import Credentials 7 | 8 | 9 | @pytest.fixture(scope="session") 10 | def pmr_mongo_config(): 11 | """Override this fixture with a :class:`MongoConfig` instance to specify different defaults. 12 | 13 | Examples: 14 | >>> @pytest.fixture(scope='session') 15 | ... def pmr_mongo_config(): 16 | ... 
return MongoConfig(image="mongo:3.4", root_database="foo") 17 | """ 18 | return MongoConfig() 19 | 20 | 21 | @pytest.fixture(scope="session") 22 | def pmr_mongo_container(pytestconfig, pmr_mongo_config): 23 | yield from get_container(pytestconfig, pmr_mongo_config) 24 | 25 | 26 | def create_mongo_fixture(scope="function"): 27 | """Produce a mongo fixture. 28 | 29 | Any number of fixture functions can be created. Under the hood they will all share the same 30 | database server. 31 | 32 | Arguments: 33 | scope: Passthrough pytest's fixture scope. 34 | """ 35 | 36 | @pytest.fixture(scope=scope) 37 | def _(pmr_mongo_container, pmr_mongo_config): 38 | return _create_clean_database(pmr_mongo_config) 39 | 40 | return _ 41 | 42 | 43 | def _create_clean_database(config): 44 | root_client = pymongo.MongoClient(config.host, config.port) 45 | root_db = root_client[config.root_database] 46 | 47 | # Create a collection called `pytestMockResourceDbs' in the admin tab if not already created. 48 | db_collection = root_db["pytestMockResourcesDbs"] 49 | 50 | # Create a Document in the `pytestMockResourcesDbs` collection. 51 | result = db_collection.insert_one({}) 52 | 53 | # Create a database where the name is equal to that ID. 
54 | db_id = str(result.inserted_id) 55 | new_database = root_client[db_id] 56 | 57 | # Create a user as that databases owner 58 | password = "password" # noqa: S105 59 | new_database.command("createUser", db_id, pwd=password, roles=["dbOwner"]) 60 | 61 | # pass back an authenticated db connection 62 | limited_client = pymongo.MongoClient( 63 | config.host, config.port, username=db_id, password=password, authSource=db_id 64 | ) 65 | limited_db = limited_client[db_id] 66 | 67 | Credentials.assign_from_credentials( 68 | limited_db, 69 | drivername="mongodb", 70 | host=config.host, 71 | port=config.port, 72 | username=db_id, 73 | password=password, 74 | database=db_id, 75 | ) 76 | return limited_db 77 | -------------------------------------------------------------------------------- /docs/source/relational/manual-engines.rst: -------------------------------------------------------------------------------- 1 | Manually Constructed Engines 2 | ============================ 3 | Due to the dynamic nature of the creation of the databases themselves, its 4 | non-trivial for a user to know what the connection string, for example, would 5 | be for the database ahead of time. Which makes testing code which manually 6 | constructs its own :class:`sqlalchemy.Engine` objects internally more difficult. 7 | 8 | Therefore, generally preferable way to use the fixtures is that you will be yielded 9 | a preconstructed engine pointing at the database to which your test is intended 10 | to run against; and to write your code such that it accepts the engine as a 11 | function/class parameter. 12 | 13 | However, this is not always possible for all classes of tests, nor does it help 14 | for code which might already be written with a tightly coupled mechanism for 15 | engine creation. 16 | 17 | For (contrived) example: 18 | 19 | .. 
code-block:: python 20 | :caption: package/entrypoint.py 21 | 22 | import psycopg2 23 | import sqlalchemy 24 | 25 | def psycopg2_main(**config): 26 | conn = psycopg2.connect(**config) 27 | do_the_thing(conn) 28 | ... 29 | 30 | def sqlalchemy_main(**config): 31 | conn = sqlalchemy.create_engine(**config) 32 | do_the_thing(conn) 33 | ... 34 | 35 | As you can see, in order to test these functions, we must pass in valid **credentials** 36 | rather than an engine itself. 37 | 38 | pmr_credentials 39 | --------------- 40 | Each of the fixtures you might create will attach a :code:`pmr_credentials` 41 | attribute onto the engine it yields to the test which will be an instance of a 42 | :class:`Credentials` class. 43 | 44 | Attributes on this class include all the credentials required to connect to the 45 | particular database. Additionally, there are convenience methods specifically meant to 46 | coerce the credentials into a form directly accepted by common connection 47 | mechanisms like :class:`psycopg2.connect` or :class:`sqlalchemy.engine.url.URL`. 48 | 49 | .. code-block:: python 50 | 51 | from pytest_mock_resources import ( 52 | create_postgres_fixture, 53 | create_redshift_fixture, 54 | ) 55 | 56 | from package import entrypoint 57 | 58 | postgres = create_postgres_fixture() 59 | redshift = create_redshift_fixture() 60 | 61 | def test_psycopg2_main_postgres(postgres): 62 | credentials = postgres.pmr_credentials 63 | result = entrypoint.psycopg2_main(**credentials.as_psycopg2_connect_args()) 64 | assert result ... 65 | 66 | def test_sqlalchemy_main_postgres(postgres): 67 | credentials = postgres.pmr_credentials 68 | result = entrypoint.sqlalchemy_main(**credentials.as_url()) 69 | assert result ... 
70 | -------------------------------------------------------------------------------- /tests/fixture/test_postgres.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, event, Integer 2 | 3 | from pytest_mock_resources import create_postgres_fixture, PostgresConfig 4 | from pytest_mock_resources.compat.sqlalchemy import declarative_base 5 | from pytest_mock_resources.container.postgres import get_sqlalchemy_engine 6 | from pytest_mock_resources.fixture.postgresql import _produce_clean_database 7 | 8 | Base = declarative_base() 9 | 10 | 11 | class Thing(Base): 12 | __tablename__ = "thing" 13 | 14 | id = Column(Integer, autoincrement=True, primary_key=True) 15 | 16 | 17 | createdb_template_pg = create_postgres_fixture(Base, createdb_template="template0") 18 | 19 | 20 | def test_create_clean_database_createdb_template(pmr_postgres_config, createdb_template_pg): 21 | """Assert `createdb_template` is included in emitted CREATE DATABASE statement.""" 22 | root_engine = get_sqlalchemy_engine( 23 | pmr_postgres_config, pmr_postgres_config.root_database, isolation_level="AUTOCOMMIT" 24 | ) 25 | 26 | statement = "" 27 | 28 | def before_execute(conn, clauseelement, multiparams, params, execution_options=None): 29 | # Search for our create database statement, so we can assert against it. 30 | if "CREATE DATABASE" in clauseelement.text: 31 | nonlocal statement 32 | statement = clauseelement.text 33 | return clauseelement, multiparams, params 34 | 35 | # Use the event system to hook into the statements being executed by sqlalchemy. 
36 | with root_engine.connect().execution_options(isolation_level="AUTOCOMMIT") as conn: 37 | event.listen(conn, "before_execute", before_execute) 38 | _produce_clean_database(conn, createdb_template="template0") 39 | event.remove(conn, "before_execute", before_execute) 40 | 41 | assert "template0" in statement 42 | 43 | 44 | def test_createdb_template(createdb_template_pg): 45 | """Assert successful usage of a fixture which sets the `createdb_template` argument.""" 46 | with createdb_template_pg.begin() as conn: 47 | conn.execute(Thing.__table__.insert().values({"id": 1})) 48 | 49 | 50 | nested_transaction = create_postgres_fixture(Base) 51 | 52 | 53 | def test_nested_transaction(nested_transaction): 54 | """Assert success with a fixture relying on being in a transaction (like SAVEPOINT).""" 55 | with nested_transaction.begin() as conn: 56 | with conn.begin_nested(): 57 | conn.execute(Thing.__table__.insert().values({"id": 1})) 58 | 59 | 60 | def test_check_fn_env_based_port(nested_transaction): 61 | """Assert check_fn functions with non-int port.""" 62 | creds = nested_transaction.pmr_credentials 63 | config = PostgresConfig(host=creds.host, port=str(creds.port)) 64 | config.check_fn() 65 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/patch/redshift/psycopg2.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | from unittest import mock 3 | 4 | from sqlalchemy.sql.base import Executable 5 | 6 | from pytest_mock_resources.container.postgres import PostgresConfig 7 | from pytest_mock_resources.patch.redshift.mock_s3_copy import mock_s3_copy_command, strip 8 | from pytest_mock_resources.patch.redshift.mock_s3_unload import mock_s3_unload_command 9 | 10 | 11 | @contextlib.contextmanager 12 | def patch_connect(config: PostgresConfig, database: str): 13 | try: 14 | # Not all consumers of redshift may be using psycopg2, so it could be unavailable. 
15 | import psycopg2 16 | except ImportError: 17 | yield 18 | return 19 | 20 | new_connect = mock_psycopg2_connect(config, database, _connect=psycopg2._connect) 21 | 22 | # We patch `psycopg2._connect` specifically because it allows us to patch the 23 | # connection regardless of the import style used by the caller. 24 | with mock.patch("psycopg2._connect", new=new_connect) as p: 25 | yield p 26 | 27 | 28 | def mock_psycopg2_connect(config: PostgresConfig, database: str, _connect): 29 | """Patch `psycopg2._connect`. 30 | 31 | Add support for S3 COPY and UNLOAD. 32 | """ 33 | import psycopg2 34 | 35 | class CustomCursor(psycopg2.extensions.cursor): 36 | """A custom cursor class to define a custom execute method.""" 37 | 38 | def execute(self, sql, args=None): 39 | if isinstance(sql, Executable): 40 | return super().execute(sql, args) 41 | 42 | if strip(sql).lower().startswith("copy"): 43 | mock_s3_copy_command(sql, self) 44 | sql = "commit" 45 | 46 | if strip(sql).lower().startswith("unload"): 47 | mock_s3_unload_command(sql, self) 48 | sql = "commit" 49 | 50 | return super().execute(sql, args) 51 | 52 | def _mock_psycopg2_connect(*args, **kwargs): 53 | """Substitute the default cursor with a custom cursor.""" 54 | conn = _connect(*args, **kwargs) 55 | dsn_info = conn.get_dsn_parameters() 56 | 57 | # We want to be sure to *only* patch the cursor's behavior when we think the 58 | # database connection is for the database we're specifically referencing. This 59 | # should prevent over-patching for connections which are not relevant to our 60 | # fixture. 
61 | connection_info_matches = ( 62 | config.host == dsn_info["host"] 63 | and str(config.port) == dsn_info["port"] 64 | and database == dsn_info["dbname"] 65 | ) 66 | 67 | if connection_info_matches: 68 | conn.cursor_factory = CustomCursor 69 | return conn 70 | 71 | return _mock_psycopg2_connect 72 | -------------------------------------------------------------------------------- /docs/source/config.rst: -------------------------------------------------------------------------------- 1 | Config 2 | ------ 3 | 4 | In order to support various projects and environments in which tests might be run, each docker-based 5 | fixture has the ability to customize its default configuration. 6 | 7 | The precedence of the available config mechanisms follow the order: 8 | 9 | * Environment variables 10 | * Fixture Configuration 11 | * Default Configuration 12 | 13 | 14 | Environment Variables 15 | ~~~~~~~~~~~~~~~~~~~~~ 16 | 17 | In general we would only recommend use of the environment variable config for temporary 18 | changes to a value, or for configuration that is specific to the environment in which it is being run. 19 | 20 | A common use case for this mechanism is local port conflicts. When a container is started up, 21 | we bind to a pre-specified port for that resource kind. We (attempt to) avoid conflicts 22 | by binding to a non-standard port for that resource by default, but conflicts can still happen 23 | 24 | All configuration options for the given resource are available under env vars named in the pattern: 25 | 26 | .. code-block:: bash 27 | 28 | PMR_{RESOURCE}_{CONFIG} 29 | # e.x. 30 | export PMR_POSTGRES_PORT=54321 31 | 32 | Resource is the name of the resource, i.e. POSTGRES, MONGO, REDIS, etc 33 | 34 | CONFIG is the name of the config name. Every container will support at **least**: IMAGE, HOST, PORT, and CI_PORT. 
35 | 36 | 37 | Fixture Configuration 38 | ~~~~~~~~~~~~~~~~~~~~~ 39 | 40 | In general, we recommend fixture configuration for persistent configuration that is an attribute 41 | of the project itself, rather than the environment in which the project is being run. 42 | 43 | The most common example of this will be :code:`image`. If you're running postgres:8.0.0 in production, 44 | you should not be testing with our default image version! Other resource-specific configurations, 45 | such as :code:`root_database`, might also be typical uses of this mechanism. 46 | 47 | Here, the pattern is by defining a fixture in the following pattern: 48 | 49 | .. code-block:: python 50 | 51 | @pytest.fixture(scope='session') 52 | def pmr_{resource}_config(): 53 | return {Resource}Config(...options...) 54 | 55 | I.e. :code:`pmr_postgres_config`, returning a :class:`PostgresConfig` type. might look like 56 | 57 | .. code-block:: python 58 | :caption: conftest.py 59 | 60 | import pytest 61 | from pytest_mock_resources import PostgresConfig 62 | 63 | @pytest.fixture(scope='session') 64 | def pmr_postgres_config(): 65 | return PostgresConfig(image='postgres:11.0.0') 66 | 67 | Default Configuration 68 | ~~~~~~~~~~~~~~~~~~~~~ 69 | 70 | Default configuration uses the same mechanism (i.e. fixture configuration) as you might, to 71 | pre-specify the default options, so that the plugin can usually be used as-is with no 72 | configuration. 73 | 74 | The configuration defaults should not be assumed to be static/part of the API (and typically 75 | changes should be irrelevant to most users). 76 | 77 | See the :ref:`api` docs for details on the current defaults. 
class StubPytestConfig:
    """Minimal stand-in for pytest's config object, used by the ``pmr`` CLI.

    ``get_container`` expects a pytest config; outside a pytest run the CLI
    supplies this stub with multiprocess-safety and container cleanup disabled.
    """

    pmr_docker_client = None
    pmr_cleanup_container = False
    pmr_multiprocess_safe = False

    class option:  # noqa: N801
        # Mirrors the ``config.option`` namespace pytest exposes.
        pmr_cleanup_container = False
        pmr_multiprocess_safe = False

    def getini(self, attr):
        """Emulate ``pytest.Config.getini`` by reading the class attribute."""
        value = getattr(self, attr)
        return value
54 | ) 55 | parser.add_argument( 56 | "fixtures", 57 | metavar="Fixture", 58 | type=str, 59 | nargs="+", 60 | help="Available Fixtures: {}".format(", ".join(DockerContainerConfig.subclasses)), 61 | ) 62 | parser.add_argument( 63 | "--stop", action="store_true", help="Stop previously started PMR containers" 64 | ) 65 | 66 | parser.add_argument( 67 | "--load", 68 | action="append", 69 | help="Import a module in order to load 3rd party resources.", 70 | ) 71 | return parser 72 | 73 | 74 | def execute(fixture: str, pytestconfig: StubPytestConfig, start=True, stop=False): 75 | config_cls = DockerContainerConfig.subclasses[fixture] 76 | config = config_cls() 77 | 78 | if start: 79 | generator = get_container(pytestconfig, config) 80 | for _ in generator: 81 | pass 82 | 83 | if stop: 84 | docker = get_docker_client(pytestconfig) 85 | 86 | assert config.port 87 | name = container_name(fixture, int(config.port)) 88 | try: 89 | container = docker.container.inspect(name) 90 | except Exception: 91 | sys.stderr.write(f"Failed to stop {fixture} container\n") 92 | else: 93 | container.kill() 94 | 95 | 96 | if __name__ == "__main__": 97 | main() 98 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/patch/redshift/sqlalchemy.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from sqlalchemy import event 4 | from sqlalchemy.sql.base import Executable 5 | 6 | from pytest_mock_resources.compat import sqlparse 7 | from pytest_mock_resources.patch.redshift.mock_s3_copy import mock_s3_copy_command 8 | from pytest_mock_resources.patch.redshift.mock_s3_unload import mock_s3_unload_command 9 | 10 | 11 | def register_redshift_behavior(engine): 12 | """Substitute the default execute method with a custom execute for copy and unload command.""" 13 | event.listen(engine, "before_execute", receive_before_execute, retval=True) 14 | event.listen(engine, 
def receive_before_execute(
    conn, clauseelement: Union[Executable, str], multiparams, params, execution_options=None
):
    """Handle the `before_execute` event.

    Specifically, this only needs to handle the parsing of multiple statements into
    individual cursor executions. Only the final statement's return value will be
    returned.
    """
    # Compiled/ORM constructs are a single executable unit; nothing to split.
    if isinstance(clauseelement, Executable):
        return clauseelement, multiparams, params

    # Raw string SQL may contain several ";"-separated statements: run all but
    # the last directly against a fresh DBAPI cursor.
    *statements, final_statement = parse_multiple_statements(clauseelement)

    cursor = conn.connection.cursor()
    for statement in statements:
        cursor.execute(statement, *multiparams, **params)

    # This listener is registered with retval=True, so the returned triple
    # replaces what sqlalchemy goes on to execute; only the final statement's
    # result is therefore surfaced to the caller.
    return final_statement, multiparams, params
48 | """ 49 | normalized_statement = _preprocess(statement).lower() 50 | if normalized_statement.startswith("unload"): 51 | mock_s3_unload_command(statement, cursor) 52 | return "SELECT 1", {} 53 | 54 | if normalized_statement.startswith("copy"): 55 | mock_s3_copy_command(statement, cursor) 56 | context.should_autocommit = True 57 | return "SELECT 1", {} 58 | return statement, parameters 59 | 60 | 61 | def parse_multiple_statements(statement: str): 62 | """Split the given sql statement into a list of individual sql statements.""" 63 | processed_statement = _preprocess(statement) 64 | return [str(statement) for statement in sqlparse.split(processed_statement)] 65 | 66 | 67 | def _preprocess(statement: str): 68 | """Preprocess the input statement.""" 69 | statement = statement.strip() 70 | # Replace any occourance of " with '. 71 | statement = statement.replace('"', "'") 72 | if statement[-1] != ";": 73 | statement += ";" 74 | return statement 75 | -------------------------------------------------------------------------------- /docs/source/ci.rst: -------------------------------------------------------------------------------- 1 | CI Service Support 2 | ================== 3 | 4 | Depending on the CI service, access to docker-related fixtures may be different than it would be 5 | locally. As such, below is an outline of how to support those fixtures within specific CI services. 6 | 7 | CircleCi 8 | -------- 9 | CircleCI 2.0+ default jobs do not have access to a docker directly, but instead interact with 10 | a remote docker. 11 | 12 | As such, you will need to include the a step in your job to setup remote docker like so: 13 | 14 | .. code-block:: yaml 15 | 16 | steps: 17 | - setup_remote_docker 18 | - checkout 19 | ... 20 | 21 | 22 | Furthermore, you should start the service ahead of time using their mechanism of choice: 23 | 24 | For 2.0 jobs 25 | 26 | .. 
code-block:: yaml 27 | 28 | jobs: 29 | : 30 | docker: 31 | - image: 32 | - image: 33 | 34 | 35 | For 2.1+ jobs 36 | 37 | .. code-block:: yaml 38 | 39 | version: 2.1 40 | 41 | executors: 42 | foo: 43 | docker: 44 | - image: 45 | - image: 46 | 47 | jobs: 48 | test: 49 | executor: foo 50 | 51 | 52 | Postgres/Redshift Container 53 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 54 | 55 | Specifically for postgres/redshift, the :code:`- image: ` portion should look like 56 | 57 | .. code-block:: yaml 58 | 59 | - image: postgres:9.6.10-alpine # or whatever image/tag you'd like 60 | environment: 61 | POSTGRES_DB: dev 62 | POSTGRES_USER: user 63 | POSTGRES_PASSWORD: password 64 | 65 | 66 | You will receive a `ContainerCheckFailed: Unable to connect to [...] Postgres test container` error in CI if the above is not added to you job config. 67 | 68 | Mongo Container 69 | ~~~~~~~~~~~~~~~ 70 | 71 | Specifically for mongo, the :code:`- image: ` portion should look like 72 | 73 | .. code-block:: yaml 74 | 75 | - image: circleci/mongo:3.6.12 # or whatever image/tag you'd like 76 | command: "mongod --journal" 77 | 78 | You will receive a `ContainerCheckFailed: Unable to connect to [...] Mongo test container` error in CI if the above is not added to you job config. 79 | 80 | 81 | GitLab 82 | ------ 83 | For :code:`pytest-mock-resources` to work on GitLab use of :code:`dind` service is required. 84 | Below is a sample configuration: 85 | 86 | .. 
def create_redis_fixture(scope="function", decode_responses: bool = False):
    """Produce a Redis fixture.

    Any number of fixture functions can be created; all of them share a single
    database server under the hood.

    .. note::

        When tests run in parallel (pytest-xdist), each worker is fanned out to
        a distinct redis "database", up to 16 (the container's fixed default
        limit) — so at most 16 simultaneous tests are supported.

        Calls to `flushall` or other cross-database commands **will** still leak
        state between tests. These details describe the current implementation
        only and should not be relied upon; the selection mechanism may change.

    Args:
        scope (str): The scope of the fixture can be specified by the user, defaults to "function".
        decode_responses (bool): Whether to decode the responses from redis.

    Raises:
        KeyError: If any additional arguments are provided to the function than what is necessary.
    """

    @pytest.fixture(scope=scope)
    def _(request, pmr_redis_container, pmr_redis_config):
        # Map each xdist worker to its own redis database: "gw3" -> 3.
        database_number = 0
        if hasattr(request.config, "workerinput"):
            database_number = int(request.config.workerinput["workerid"][2:])

        if database_number >= 16:
            raise ValueError(
                "The redis fixture currently only supports up to 16 parallel executions"
            )

        db = redis.Redis(
            host=pmr_redis_config.host,
            port=pmr_redis_config.port,
            db=database_number,
            decode_responses=decode_responses or pmr_redis_config.decode_responses,
        )
        # Each test starts from an empty database.
        db.flushdb()

        Credentials.assign_from_credentials(
            db,
            drivername="redis",
            host=pmr_redis_config.host,
            port=pmr_redis_config.port,
            database=database_number,
            username=None,
            password=None,
        )
        return db

    return _
We **do** however, recommend that use of this dialect is restricted 16 | purely to **tests** in order to be used as a postgres stand-in, rather than for use in actual 17 | application code. 18 | 19 | Schemas 20 | ------- 21 | As an in-memory database (for the most part), SQLite does not behave the same way when encountering 22 | schemas. 23 | 24 | For example, given sqlalchemy model defined as: 25 | 26 | .. code-block:: python 27 | 28 | from .models import ModelBase 29 | 30 | class User(ModelBase): 31 | __tablename__ ='user' 32 | __table_args__ = {'schema': 'private'} 33 | 34 | 35 | SQLite generally would produce an error upon use of that table, but will now work by default, and 36 | behave similarly to postgres. 37 | 38 | A caveat to this is that SQLite has no notion of a "search path" like in postgres. Therefore, 39 | programmatic use altering the search path from the default "public" (in postgres), or referencing 40 | a "public" table as "public.tablename" would not be supported. 41 | 42 | 43 | Foreign Keys 44 | ------------ 45 | SQLite supports FOREIGN KEY syntax when emitting CREATE statements for tables, 46 | however by default these constraints have no effect on the operation of the table. 47 | 48 | We simply, turn that support on by default, to match the postgres behavior. 49 | 50 | 51 | JSON/JSONB 52 | ---------- 53 | Tables which use either :code:`sqlalchemy.dialects.postgresql.JSON/JSONB` or 54 | :code:`sqlalchemy.types.Json` will work as they would in postgres. 55 | 56 | SQLite itself, recently added support for json natively, but this allows a much wider version 57 | range of SQLite to support that feature. 58 | 59 | 60 | Datetime (timezone support) 61 | --------------------------- 62 | By default, SQLite does not respect the :code:`Datetime(timezone=True)` flag. This means that normally 63 | a :code:`Datetime` column would behave differently from postgres. 
For example, where postgres 64 | would return timezone-aware :code:`datetime.datetime` objects, SQLite would return naive 65 | :code:`datetime.datetime` (which do **not** behave the same way when doing datetime math). 66 | 67 | This does **not** actually store the timezones of the datetime (as is also true for postgres). 68 | It simply matches the timezone-awareness and incoming timezone conversion behavior you see in 69 | postgres. 70 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/container/mysql.py: -------------------------------------------------------------------------------- 1 | from typing import ClassVar, Iterable 2 | 3 | import sqlalchemy 4 | 5 | from pytest_mock_resources import compat 6 | from pytest_mock_resources.config import DockerContainerConfig, fallback 7 | from pytest_mock_resources.container.base import ContainerCheckFailed 8 | 9 | 10 | class MysqlConfig(DockerContainerConfig): 11 | """Define the configuration object for MySql. 12 | 13 | Args: 14 | image (str): The docker image:tag specifier to use for mysql containers. 15 | Defaults to :code:`"mysql:5.6"`. 16 | host (str): The hostname under which a mounted port will be available. 17 | Defaults to :code:`"localhost"`. 18 | port (int): The port to bind the container to. 19 | Defaults to :code:`5532`. 20 | ci_port (int): The port to bind the container to when a CI environment is detected. 21 | Defaults to :code:`5432`. 22 | username (str): The username of the root user 23 | Defaults to :code:`"user"`. 24 | password (str): The password of the root password 25 | Defaults to :code:`"password"`. 26 | root_database (str): The name of the root database to create. 27 | Defaults to :code:`"dev"`. 
28 | """ 29 | 30 | name = "mysql" 31 | _fields: ClassVar[Iterable] = { 32 | "image", 33 | "host", 34 | "port", 35 | "ci_port", 36 | "username", 37 | "password", 38 | "root_database", 39 | } 40 | _fields_defaults: ClassVar[dict] = { 41 | "image": "mysql:5.6", 42 | "port": 3406, 43 | "ci_port": 3306, 44 | # XXX: For now, username is disabled/ignored. We need root access for PMR 45 | # internals. 46 | "username": "root", 47 | "password": "password", 48 | "root_database": "dev", 49 | } 50 | 51 | @fallback 52 | def username(self): 53 | raise NotImplementedError() 54 | 55 | @fallback 56 | def password(self): 57 | raise NotImplementedError() 58 | 59 | @fallback 60 | def root_database(self): 61 | raise NotImplementedError() 62 | 63 | def ports(self): 64 | return {3306: self.port} 65 | 66 | def environment(self): 67 | return { 68 | "MYSQL_DATABASE": self.root_database, 69 | "MYSQL_ROOT_PASSWORD": self.password, 70 | } 71 | 72 | def check_fn(self): 73 | try: 74 | get_sqlalchemy_engine(self, self.root_database) 75 | except sqlalchemy.exc.OperationalError: 76 | raise ContainerCheckFailed( 77 | f"Unable to connect to a presumed MySQL test container via given config: {self}" 78 | ) 79 | 80 | 81 | def get_sqlalchemy_engine(config, database_name, **engine_kwargs): 82 | url = compat.sqlalchemy.URL( 83 | "mysql+pymysql", 84 | username=config.username, 85 | password=config.password, 86 | host=config.host, 87 | port=config.port, 88 | database=database_name, 89 | ) 90 | 91 | from pytest_mock_resources.compat import pymysql 92 | 93 | pymysql.connect 94 | 95 | engine = sqlalchemy.create_engine(url, **engine_kwargs) 96 | 97 | # Verify engine is connected 98 | engine.connect() 99 | 100 | return engine 101 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/config.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import functools 4 | import os 5 | 
_DOCKER_HOST = "host.docker.internal"


def is_ci():
    """Whether we appear to be running under a CI service (``CI=true``)."""
    return os.getenv("CI") == "true"


@functools.lru_cache()
def is_docker_host():
    """Whether ``host.docker.internal`` resolves via DNS (cached per process)."""
    try:
        socket.gethostbyname(_DOCKER_HOST)
    except socket.gaierror:
        return False
    return True


def get_env_config(name, kind, default=None):
    """Read a ``PMR_{NAME}_{KIND}`` environment-variable override, if any."""
    return os.environ.get(f"PMR_{name.upper()}_{kind.upper()}", default)


def fallback(fn):
    """Turn a config method into a property with layered value resolution.

    Resolution order for the attribute named after the decorated method:
    environment-variable override, explicitly-configured instance value,
    the method's own return value, then the class-level default (or ``None``
    when the method raises ``NotImplementedError`` and no default exists).
    """
    attr = fn.__name__

    @property
    @functools.wraps(fn)
    def wrapper(self):
        env_value = get_env_config(self.name, attr)
        if env_value is not None:
            return env_value

        if self.has(attr):
            return self.get(attr)

        try:
            return fn(self)
        except NotImplementedError:
            # `.get` returns None for attributes without a declared default.
            return self._fields_defaults.get(attr)

    return wrapper
class Credentials:
    """Returned as the `pmr_credentials` attribute on supported docker-based fixtures.

    Examples:
        It's also directly dict-able.
        >>> creds = Credentials('d', 'l', 'p', 'baz', 'user', 'pass')
        >>> dict_creds = dict(creds)
    """

    def __init__(self, drivername, host, port, database, username, password):
        self.drivername = drivername
        self.host = host
        self.port = port
        # Stored as `str`; `as_redis_kwargs` converts numeric db indices back.
        self.database = str(database)
        self.username = username
        self.password = password

    def __iter__(self):
        # Yields (field, value) pairs, which is what makes `dict(creds)` work.
        yield from self.__dict__.items()

    def __getitem__(self, item):
        return self.__dict__[item]

    def as_url(self):
        """Return a stringified dbapi URL string."""
        return self.as_sqlalchemy_url().render_as_string(hide_password=False)

    def as_sqlalchemy_url(self):
        """Return a sqlalchemy :class:`sqlalchemy.engine.url.URL`."""
        return compat.sqlalchemy.URL(**dict(self))

    def as_sqlalchemy_url_kwargs(self):
        """Return the valid keyword arguments to sqlalchemy :class:`sqlalchemy.engine.url.URL`."""
        return dict(self)

    def as_psycopg2_kwargs(self):
        """Return the valid keyword arguments to :func:`psycopg2.connect`."""
        return dict(
            host=self.host,
            port=self.port,
            user=self.username,
            password=self.password,
            dbname=self.database,
        )

    def as_mongo_kwargs(self):
        """Return the valid keyword arguments to a mongo client."""
        return dict(
            host=self.host,
            port=self.port,
            username=self.username,
            password=self.password,
            authSource=self.database,
        )

    def as_redis_kwargs(self):
        """Return the valid keyword arguments to a redis client."""
        return dict(
            host=self.host,
            port=self.port,
            db=int(self.database),
            username=self.username,
            password=self.password,
        )

    @classmethod
    def assign_from_connection(cls, connection):
        """Build credentials from a connection/session's engine URL and attach them to it."""
        if isinstance(connection, Session):
            url = connection.connection().engine.url
        else:
            url = connection.url

        instance = cls(
            drivername=url.drivername,
            host=url.host,
            port=url.port,
            username=url.username,
            password=url.password,
            database=url.database,
        )
        connection.pmr_credentials = instance
        return instance

    @classmethod
    def assign_from_credentials(cls, engine, **credentials):
        """Build credentials from keyword arguments and attach them to `engine`."""
        instance = Credentials(**credentials)
        engine.pmr_credentials = instance
        return instance
If you create 42 | boto3 ``client``/``resource`` objects using boto3 directly, outside of the object 43 | handed to your test, you should make sure to pass all the credentials fields into the 44 | constructor such that it targets the correct AWS instance/account. 45 | 46 | For example: 47 | 48 | .. code-block:: python 49 | 50 | import boto3 51 | from pytest_mock_resources import create_moto_fixture 52 | from pytest_mock_resources.fixture.moto import Session 53 | 54 | moto = create_moto_fixture() 55 | 56 | def test_list_files(moto: Session): 57 | kwargs = moto.pmr_credentials.as_kwargs() 58 | s3_client = boto3.client("s3", **kwargs) 59 | 60 | 61 | .. note:: 62 | 63 | A moto dashboard should be available for debugging while the container is running. 64 | By default it would be available at ``http://localhost:5555/moto-api/#`` (but 65 | the exact URL may be different depending on your host/port config. 66 | 67 | 68 | Actions 69 | ------- 70 | Similar to ordered "actions" in other fixtures, moto supports the static creation of 71 | certain kinds of objects ahead of the actual test execution as well. 72 | 73 | For moto, this represents as physical infrastructure/configuration/objects within 74 | the moto "AWS account" being used by your test. 75 | 76 | Per the note above, each test executes in a unique moto "AWS account". This means 77 | that two ``create_moto_fixture`` fixtures with different infrastructure will be 78 | completely distinct and not leak state (either between tests or between fixtures). 79 | 80 | .. note:: 81 | 82 | we will absolutely accept feedback on additional kinds of supported objects 83 | to add, the current set is motivated by internal use, but is certainly not exhaustive. 84 | 85 | S3 86 | ~~ 87 | Currently: :class:`S3Bucket`, :class:`S3Object` 88 | 89 | These objects help reduce boilerplate around setting up buckets/files among tests. 90 | 91 | .. 
code-block:: python 92 | 93 | from pytest_mock_resources import create_moto_fixture, S3Bucket, S3Object 94 | from pytest_mock_resources.fixture.moto import Session 95 | 96 | bucket = S3Bucket("test") 97 | moto = create_moto_fixture( 98 | S3Bucket("other_bucket"), 99 | bucket, 100 | bucket.object("test.csv", "a,b,c\n1,2,3"), 101 | ) 102 | 103 | def test_ls(moto: Session): 104 | resource = moto.resource("s3") 105 | objects = resource.Bucket("test").objects.all() 106 | assert len(objects) == 1 107 | 108 | assert objects[0].key == "test.txt" 109 | assert objects[0].get()["Body"].read() == b"a,b,c\n1,2,3" 110 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/mysql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sqlalchemy 3 | from sqlalchemy import text 4 | 5 | from pytest_mock_resources.container.base import get_container 6 | from pytest_mock_resources.container.mysql import get_sqlalchemy_engine, MysqlConfig 7 | from pytest_mock_resources.sqlalchemy import EngineManager 8 | 9 | 10 | @pytest.fixture(scope="session") 11 | def pmr_mysql_config(): 12 | """Override this fixture with a :class:`MysqlConfig` instance to specify different defaults. 13 | 14 | Examples: 15 | >>> @pytest.fixture(scope='session') 16 | ... def pmr_mysql_config(): 17 | ... return MysqlConfig(image="mysql:5.2", root_database="foo") 18 | """ 19 | return MysqlConfig() 20 | 21 | 22 | @pytest.fixture(scope="session") 23 | def pmr_mysql_container(pytestconfig, pmr_mysql_config): 24 | yield from get_container(pytestconfig, pmr_mysql_config, interval=1, retries=60) 25 | 26 | 27 | def create_mysql_fixture( 28 | *ordered_actions, 29 | scope="function", 30 | tables=None, 31 | session=None, 32 | engine_kwargs=None, 33 | ): 34 | """Produce a MySQL fixture. 35 | 36 | Any number of fixture functions can be created. Under the hood they will all share the same 37 | database server. 
def _create_clean_database(config):
    """Create a brand-new, uniquely named database on the shared mysql server.

    A bookkeeping table (``pytest_mock_resource_db``) hands out auto-increment
    ids; each caller inserts a row and names its database after the resulting
    id, so concurrently running tests never collide on a database name.
    Returns the new database's name.
    """
    # AUTOCOMMIT so DDL below takes effect immediately across processes.
    root_engine = get_sqlalchemy_engine(config, config.root_database, isolation_level="AUTOCOMMIT")

    with root_engine.begin() as conn:
        try:
            conn.execute(
                text(
                    """
                    CREATE TABLE IF NOT EXISTS pytest_mock_resource_db(
                        id serial
                    );
                    """
                )
            )
        except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.ProgrammingError):
            # A race condition may occur during table creation if:
            # - another process has already created the table
            # - the current process begins creating the table
            # - the other process commits the table creation
            # - the current process tries to commit the table creation
            pass

        # Claim a fresh id; LAST_INSERT_ID() is connection-local, so this is
        # safe even with concurrent inserts from other processes.
        conn.execute(text("INSERT INTO pytest_mock_resource_db VALUES (DEFAULT)"))
        result = conn.execute(text("SELECT LAST_INSERT_ID()"))
        id_ = next(iter(result))[0]
        database_name = f"pytest_mock_resource_db_{id_}"

        conn.execute(text(f"CREATE DATABASE {database_name}"))

    return database_name
def verify_relational(connection, credentials, session=False):
    """Verify connection to the same database as the one given to the test function, using credentials."""
    assert credentials

    ddl = [
        text("create table foo (id integer)"),
        text("commit"),
    ]
    if session:
        # Session-style fixtures execute statements directly.
        for statement in ddl:
            connection.execute(statement)
    else:
        # Engine-style fixtures need an explicit transactional connection.
        with connection.begin() as conn:
            for statement in ddl:
                conn.execute(statement)

    # Reconnect purely from the credentials and confirm the table created via
    # the fixture's own connection is visible.
    manual_engine = create_engine(credentials.as_url())
    with manual_engine.connect() as conn:
        conn.execute(text("select * from foo"))
def test_tightly_scoped_patch(redshift, postgres):
    """Assert psycopg2's patch is tightly scoped

    Redshift combined with a 2nd non-redshift fixture in the same test should not
    add redshift-specific features to the other engine.
    """
    import moto

    statement = COPY_TEMPLATE.format(
        COMMAND="COPY",
        LOCATION="s3://mybucket/file.csv",
        COLUMNS="",
        FROM="from",
        CREDENTIALS="credentials",
        OPTIONAL_ARGS="",
    )
    copy_command = text(statement)

    with moto.mock_s3():
        setup_table_and_bucket(redshift)
        # No bucket for postgres: its COPY attempt must fail at SQL parse
        # time, before any S3 interaction could occur.
        setup_table_and_bucket(postgres, create_bucket=False)

        # Patched engine: the redshift-specific COPY is intercepted and
        # emulated against the mocked S3 bucket.
        with redshift.begin() as conn:
            conn.execute(copy_command)

        # Unpatched engine: plain postgres rejects the redshift-only syntax.
        with pytest.raises(sqlalchemy.exc.ProgrammingError) as e:
            with postgres.begin() as conn:
                conn.execute(copy_command)

        assert 'syntax error at or near "credentials"' in str(e.value)
With this feature enabled, all actions considered "safe" to perform statically
will be performed exactly once per test session, in a template database.
Static actions are actions which are safe to execute exactly once, because they
have predictable semantics: they can both be executed safely once per test session,
and run in a completely separate transaction and database from the
one handed to the test.
.. warning::
_postgres_template_database: https://www.postgresql.org/docs/current/manage-ag-templatedbs.html 106 | -------------------------------------------------------------------------------- /tests/fixture/test_mongo.py: -------------------------------------------------------------------------------- 1 | from pytest_mock_resources import create_mongo_fixture 2 | 3 | mongo = create_mongo_fixture() 4 | 5 | 6 | def test_basic_mongo_fixture(mongo): 7 | collections = mongo.list_collection_names() 8 | assert collections == [] 9 | 10 | 11 | def test_collection_exists(mongo): 12 | collection = mongo["customers"] 13 | to_insert = {"name": "John", "address": "Highway 37"} 14 | collection.insert_one(to_insert) 15 | collections = mongo.list_collection_names() 16 | assert "customers" in collections 17 | 18 | 19 | def test_insert_one(mongo): 20 | collection = mongo["customers"] 21 | to_insert = {"name": "John", "address": "Highway 37"} 22 | collection.insert_one(to_insert) 23 | returned = collection.find_one() 24 | assert returned == to_insert 25 | 26 | 27 | def test_insert_all(mongo): 28 | collection = mongo["customers"] 29 | to_insert = [ 30 | {"name": "Amy", "address": "Apple st 652"}, 31 | {"name": "Hannah", "address": "Mountain 21"}, 32 | {"name": "Michael", "address": "Valley 345"}, 33 | {"name": "Sandy", "address": "Ocean blvd 2"}, 34 | {"name": "Betty", "address": "Green Grass 1"}, 35 | {"name": "Richard", "address": "Sky st 331"}, 36 | {"name": "Susan", "address": "One way 98"}, 37 | {"name": "Vicky", "address": "Yellow Garden 2"}, 38 | {"name": "Ben", "address": "Park Lane 38"}, 39 | {"name": "William", "address": "Central st 954"}, 40 | {"name": "Chuck", "address": "Main Road 989"}, 41 | {"name": "Viola", "address": "Sideway 1633"}, 42 | ] 43 | collection.insert_many(to_insert) 44 | result = collection.find().sort("name") 45 | returned = list(result) 46 | assert returned == sorted(to_insert, key=lambda x: x["name"]) 47 | 48 | 49 | def test_query(mongo): 50 | collection = 
def test_multiple_mongos(mongo_1, mongo_2, mongo_3):
    """Each mongo fixture must be isolated: identical writes succeed on all three."""

    def check_isolated(client):
        customers = client["customers"]
        documents = [
            {"name": "John", "address": "Highway 37"},
            {"name": "Viola", "address": "Highway 37"},
        ]
        customers.insert_many(documents)

        fetched = list(customers.find().sort("name"))
        assert fetched == documents

    for client in (mongo_1, mongo_2, mongo_3):
        check_isolated(client)
| from pymongo import MongoClient 122 | 123 | client = MongoClient(**mongo.pmr_credentials.as_mongo_kwargs()) 124 | db = client[mongo.pmr_credentials.database] 125 | 126 | collection = db["customers"] 127 | to_insert = [ 128 | {"name": "John", "address": "Highway 37"}, 129 | {"name": "Viola", "address": "Highway 37"}, 130 | ] 131 | collection.insert_many(to_insert) 132 | 133 | result = collection.find().sort("name") 134 | returned = list(result) 135 | 136 | assert returned == to_insert 137 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pytest-mock-resources" 3 | version = "2.12.4" 4 | description = "A pytest plugin for easily instantiating reproducible mock resources." 5 | authors = [ 6 | "Omar Khan ", 7 | "Dan Cardin ", 8 | "Gabriel Michael ", 9 | "Prateek Pisat ", 10 | ] 11 | license = "MIT" 12 | keywords = ["pytest", "sqlalchemy", "docker", "fixture", "mock"] 13 | classifiers = ["Framework :: Pytest"] 14 | repository = "https://github.com/schireson/pytest-mock-resources" 15 | packages = [{ include = "pytest_mock_resources", from = "src" }] 16 | readme = 'README.md' 17 | include = ["*.md", "py.typed"] 18 | 19 | [tool.poetry.dependencies] 20 | python = ">=3.7, <4" 21 | 22 | pytest = { version = ">=1.0" } 23 | sqlalchemy = { version = ">1.0, !=1.4.0, !=1.4.1, !=1.4.2, !=1.4.3, !=1.4.4, !=1.4.5, !=1.4.6, !=1.4.7, !=1.4.8, !=1.4.9, !=1.4.10, !=1.4.11, !=1.4.12, !=1.4.13, !=1.4.14, !=1.4.15, !=1.4.16, !=1.4.17, !=1.4.18, !=1.4.19, !=1.4.20, !=1.4.21, !=1.4.22, !=1.4.23" } 24 | 25 | typing_extensions = "*" 26 | importlib-metadata = { version = "*", python = "<3.8" } 27 | 28 | # extra [postgres] 29 | psycopg2 = { version = "*", optional = true } 30 | psycopg2-binary = { version = "*", optional = true } 31 | asyncpg = { version = "*", optional = true } 32 | 33 | # extra [redshift] 34 | moto = { version = "*", 
optional = true } 35 | boto3 = { version = "*", optional = true } 36 | sqlparse = { version = "*", optional = true } 37 | 38 | # extra [mongo] 39 | pymongo = { version = "*", optional = true } 40 | 41 | # extra [redis] 42 | redis = { version = "*", optional = true } 43 | 44 | # extra [mysql] 45 | pymysql = { version = ">=1.0", optional = true } 46 | 47 | # extra [docker] 48 | filelock = { version = "*", optional = true } 49 | python-on-whales = { version = ">=0.22.0", optional = true } 50 | 51 | [tool.poetry.dev-dependencies] 52 | botocore = "1.33.13" 53 | coverage = "*" 54 | moto = ">=2.3.2" 55 | mypy = { version = "0.982" } 56 | pytest-asyncio = "*" 57 | pytest-xdist = "*" 58 | responses = ">=0.23.0" 59 | ruff = "0.1.15" 60 | types-PyMySQL = "^1.0.2" 61 | types-dataclasses = "^0.6.5" 62 | types-filelock = "^3.2.7" 63 | types-redis = "^3.5.6" 64 | types-requests = "*" 65 | types-six = "^1.16.0" 66 | 67 | [tool.poetry.extras] 68 | docker = ['python-on-whales', 'filelock'] 69 | postgres = ['psycopg2', 'python-on-whales', 'filelock'] 70 | postgres-binary = ['psycopg2-binary', 'python-on-whales', 'filelock'] 71 | postgres-async = ['asyncpg', 'python-on-whales', 'filelock'] 72 | redshift = ['boto3', 'moto', 'sqlparse', 'python-on-whales', 'filelock'] 73 | mongo = ['pymongo', 'python-on-whales', 'filelock'] 74 | moto = ['boto3', 'python-on-whales', 'filelock'] 75 | redis = ['redis', 'python-on-whales', 'filelock'] 76 | mysql = ['pymysql', 'python-on-whales', 'filelock'] 77 | 78 | [tool.poetry.plugins.pytest11] 79 | pytest_mock_resources = "pytest_mock_resources" 80 | 81 | [tool.poetry.scripts] 82 | pmr = "pytest_mock_resources.cli:main" 83 | 84 | [tool.ruff] 85 | src = ["src", "tests"] 86 | line-length = 100 87 | 88 | target-version = "py37" 89 | select = ["C", "D", "E", "F", "I", "N", "Q", "RET", "RUF", "S", "T", "UP", "YTT"] 90 | ignore = [ 91 | "D1", 92 | "D203", 93 | "D204", 94 | "D213", 95 | "D215", 96 | "D400", 97 | "D404", 98 | "D406", 99 | "D407", 100 | "D408", 
101 | "D409", 102 | "D413", 103 | "C901", 104 | "E501", 105 | "S101", 106 | ] 107 | 108 | [tool.ruff.isort] 109 | known-first-party = ["pytest_mock_resources", "tests"] 110 | order-by-type = false 111 | 112 | [tool.ruff.per-file-ignores] 113 | "tests/**/*.py" = ["D", "S", "RUF012", "N801"] 114 | 115 | 116 | [tool.mypy] 117 | strict_optional = true 118 | ignore_missing_imports = true 119 | warn_unused_ignores = true 120 | incremental = true 121 | plugins = 'sqlalchemy.ext.mypy.plugin' 122 | exclude = 'tests/examples' 123 | 124 | [tool.pytest.ini_options] 125 | doctest_optionflags = "NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL ELLIPSIS" 126 | addopts = "--doctest-modules -vv --ff --strict-markers" 127 | norecursedirs = ".* build dist *.egg tests/examples" 128 | pytester_example_dir = "tests/examples" 129 | pmr_multiprocess_safe = true 130 | markers = ["postgres", "redshift", "mongo", "redis", "mysql"] 131 | filterwarnings = [ 132 | "error", 133 | "ignore:There is no current event loop:DeprecationWarning", 134 | "ignore:stream argument is deprecated. 
Use stream parameter in request directly:DeprecationWarning", 135 | "ignore:Boto3 will no longer support Python 3.7.*::boto3", 136 | "ignore:datetime.datetime.utcnow.*:DeprecationWarning", 137 | ] 138 | 139 | [build-system] 140 | requires = ["poetry_core==1.0.8"] 141 | build-backend = "poetry.core.masonry.api" 142 | -------------------------------------------------------------------------------- /tests/fixture/test_database.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy import create_engine, text 3 | 4 | from pytest_mock_resources import ( 5 | compat, 6 | create_mysql_fixture, 7 | create_postgres_fixture, 8 | create_redshift_fixture, 9 | create_sqlite_fixture, 10 | ) 11 | from pytest_mock_resources.sqlalchemy import EngineManager 12 | from tests import skip_if_not_sqlalchemy2 13 | 14 | sqlite = create_sqlite_fixture() 15 | postgres = create_postgres_fixture() 16 | redshift = create_redshift_fixture() 17 | mysql = create_mysql_fixture() 18 | 19 | 20 | def test_basic_sqlite_fixture(sqlite): 21 | with sqlite.connect() as conn: 22 | conn.execute(text("select 1")) 23 | 24 | 25 | def test_basic_postgres_fixture(postgres): 26 | with postgres.connect() as conn: 27 | conn.execute(text("select 1")) 28 | 29 | 30 | def test_basic_redshift_fixture(redshift): 31 | with redshift.connect() as conn: 32 | conn.execute(text("select 1")) 33 | 34 | 35 | def test_basic_postgres_and_redshift_fixture(postgres, redshift): 36 | with postgres.connect() as conn: 37 | conn.execute(text("select 1")) 38 | 39 | with redshift.connect() as conn: 40 | conn.execute(text("select 1")) 41 | 42 | 43 | def test_basic_mysql_fixture(mysql): 44 | with mysql.connect() as conn: 45 | conn.execute(text("select 1")) 46 | 47 | 48 | redshift_2 = create_redshift_fixture() 49 | redshift_3 = create_redshift_fixture() 50 | postgres_2 = create_postgres_fixture() 51 | mysql_2 = create_mysql_fixture() 52 | 53 | 54 | def 
def test_create_custom_connection(postgres_3):
    """A hand-built engine from pmr credentials can reach the fixture database."""
    credentials = postgres_3.pmr_credentials
    url = "postgresql://{username}:{password}@{host}:{port}/{database}?sslmode=disable".format(
        username=credentials.username,
        password=credentials.password,
        host=credentials.host,
        port=credentials.port,
        database=credentials.database,
    )
    engine = create_engine(url, isolation_level="AUTOCOMMIT")

    with engine.connect() as conn:
        conn.execute(text("select 1"))
EngineManager.create(dynamic_actions=["random_string"], fixture="postgres") 118 | 119 | assert ( 120 | "`random_string` invalid: create_postgres_fixture function accepts " 121 | "`sqlalchemy.orm.decl_api.DeclarativeMeta`, `collections.abc.Callable`, " 122 | "`sqlalchemy.sql.schema.MetaData`, or `AbstractAction` subclasses as inputs." 123 | in str(e.value) 124 | ) 125 | 126 | 127 | postgres_async = create_postgres_fixture(async_=True) 128 | 129 | 130 | @pytest.mark.asyncio 131 | @skip_if_not_sqlalchemy2 132 | async def test_basic_postgres_fixture_async(postgres_async): 133 | async with postgres_async.connect() as conn: 134 | await conn.execute(text("select 1")) 135 | 136 | 137 | @skip_if_not_sqlalchemy2 138 | def test_engine_reuse(postgres_async, event_loop): 139 | async def execute(async_engine): 140 | async with async_engine.connect() as conn: 141 | await conn.execute(text("select 1")) 142 | 143 | event_loop.run_until_complete(execute(postgres_async)) 144 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/redshift/__init__.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pytest_mock_resources.container.base import get_container 4 | from pytest_mock_resources.container.redshift import RedshiftConfig 5 | from pytest_mock_resources.fixture.base import asyncio_fixture, generate_fixture_id 6 | from pytest_mock_resources.fixture.postgresql import _async_fixture, _sync_fixture 7 | from pytest_mock_resources.patch.redshift import psycopg2, sqlalchemy 8 | 9 | 10 | @pytest.fixture(scope="session") 11 | def pmr_redshift_config(): 12 | """Override this fixture with a :class:`RedshiftConfig` instance to specify different defaults. 13 | 14 | Note that, by default, redshift uses a postgres container. 15 | 16 | Examples: 17 | >>> @pytest.fixture(scope='session') 18 | ... def pmr_redshift_config(): 19 | ... 
def create_redshift_fixture(
    *ordered_actions,
    scope="function",
    tables=None,
    session=None,
    async_=False,
    createdb_template="template1",
    engine_kwargs=None,
    template_database=True,
    actions_share_transaction=None,
):
    """Produce a Redshift fixture.

    Any number of fixture functions can be created. Under the hood they will all share the same
    database server.

    Note that, by default, redshift uses a postgres container as the database server
    and attempts to reintroduce approximations of Redshift features, such as
    S3 COPY/UNLOAD, redshift-specific functions, and other specific behaviors.

    Arguments:
        ordered_actions: Any number of ordered actions to be run on test setup.
        scope: Passthrough pytest's fixture scope.
        tables: Subsets the tables created by `ordered_actions`. This is generally
            most useful when a model-base was specified in `ordered_actions`.
        session: Whether to return a session instead of an engine directly. This can
            either be a bool or a callable capable of producing a session.
        async_: Whether to return an async fixture/client.
        createdb_template: The template database used to create sub-databases. "template1" is the
            default chosen when no template is specified.
        engine_kwargs: Optional set of kwargs to send into the engine on creation.
        template_database: Defaults to True. When True, amortizes the cost of performing database
            setup through `ordered_actions`, by performing them once into a postgres "template"
            database, then creating all subsequent per-test databases from that template.
        actions_share_transaction: When True, the transaction used by `ordered_actions` context
            will be the same as the one handed to the test function. This is required in order
            to support certain usages of `ordered_actions`, such as the creation of temp tables
            through a `Statements` object. By default, this behavior is enabled for synchronous
            fixtures for backwards compatibility; and disabled by default for
            asynchronous fixtures (the way v2-style/async features work in SQLAlchemy can lead
            to bad default behavior).
    """
    # Imported lazily so the UDF definitions are only loaded when a redshift
    # fixture is actually created.
    from pytest_mock_resources.fixture.redshift.udf import REDSHIFT_UDFS

    # name="pg" because redshift is backed by a postgres container; the id is
    # only generated when template databases are enabled.
    fixture_id = generate_fixture_id(enabled=template_database, name="pg")

    # Always append the redshift UDF approximations after the user's actions.
    ordered_actions = (*ordered_actions, REDSHIFT_UDFS)
    engine_kwargs_ = engine_kwargs or {}

    engine_manager_kwargs = {
        "ordered_actions": ordered_actions,
        "tables": tables,
        "createdb_template": createdb_template,
        "session": session,
        "fixture_id": fixture_id,
        "actions_share_transaction": actions_share_transaction,
    }

    @pytest.fixture(scope=scope)
    def _sync(*_, pmr_redshift_container, pmr_redshift_config):
        for engine, conn in _sync_fixture(
            pmr_redshift_config,
            engine_manager_kwargs,
            engine_kwargs_,
            fixture="redshift",
        ):
            # Layer redshift-specific SQL behavior onto the postgres engine.
            sqlalchemy.register_redshift_behavior(engine)
            # Scope the raw-psycopg2 connect patch to this fixture's database,
            # for tests that connect via psycopg2 directly.
            with psycopg2.patch_connect(pmr_redshift_config, engine.url.database):
                yield conn

    async def _async(*_, pmr_redshift_container, pmr_redshift_config):
        fixture = _async_fixture(
            pmr_redshift_config,
            engine_manager_kwargs,
            engine_kwargs_,
            fixture="redshift",
        )
        async for engine, conn in fixture:
            sqlalchemy.register_redshift_behavior(engine.sync_engine)
            # NOTE: the psycopg2 connect patch is not applied on the async
            # path here (asyncpg-based connections do not go through psycopg2).
            yield conn

    if async_:
        return asyncio_fixture(_async, scope=scope)
    return _sync
-------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | test: 17 | # Ubuntu 22 supports python3.7, ubuntu-latest does not and so we need to use an old frozen version. 18 | # Until we decide to drop support for python3.7 19 | runs-on: ubuntu-22.04 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | # Test our minimum version bound, the highest version available, 24 | # and something in the middle (i.e. what gets run locally). 25 | python-version: ["3.7.17", "3.9", "3.12"] 26 | pytest-asyncio-version: ["0.16.0", "0.19.0"] 27 | sqlalchemy-version: ["1.3.0", "1.4.0", "2.0.0"] 28 | 29 | steps: 30 | - uses: actions/checkout@v3 31 | - name: Set up Python 32 | uses: actions/setup-python@v4 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | architecture: x64 36 | 37 | - name: Install poetry 38 | uses: abatilo/actions-poetry@v2.0.0 39 | with: 40 | poetry-version: 1.5.1 41 | 42 | - name: Set up cache 43 | uses: actions/cache@v3 44 | with: 45 | path: ~/.cache/pypoetry/virtualenvs 46 | key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }} 47 | restore-keys: | 48 | ${{ runner.os }}-poetry- 49 | 50 | - name: Install base dependencies 51 | run: poetry run make install-base 52 | 53 | - name: Install specific sqlalchemy version 54 | run: | 55 | poetry run pip install 'sqlalchemy~=${{ matrix.sqlalchemy-version }}' 56 | 57 | - name: Install specific pytest-asyncio version 58 | run: pip install 'pytest-asyncio~=${{ matrix.pytest-asyncio-version }}' 59 | 60 | - if: ${{ matrix.python-version == '3.9' && matrix.sqlalchemy-version == '2.0.0' }} 61 | run: poetry run make lint 
62 | 63 | - run: poetry run make test-base 64 | 65 | - name: Install dependencies 66 | run: poetry run make install 67 | 68 | - run: poetry run make test 69 | 70 | - name: Store test result artifacts 71 | uses: actions/upload-artifact@v4 72 | with: 73 | path: coverage-${{ matrix.python-version }}-${{ matrix.pytest-asyncio-version }}-${{ matrix.sqlalchemy-version }}.xml 74 | 75 | - name: Coveralls 76 | env: 77 | COVERALLS_FLAG_NAME: run-${{ inputs.working-directory }} 78 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 79 | COVERALLS_PARALLEL: true 80 | run: | 81 | pip install tomli coveralls 82 | coveralls --service=github 83 | 84 | # Tests that postgres async fixture functions without psycopg2 installed. 85 | test-asyncpg: 86 | runs-on: ubuntu-latest 87 | strategy: 88 | fail-fast: false 89 | matrix: 90 | # Test our minimum version bound, the highest version available, 91 | # and something in the middle (i.e. what gets run locally). 92 | python-version: ["3.9"] 93 | sqlalchemy-version: ["1.4.0"] 94 | 95 | steps: 96 | - uses: actions/checkout@v3 97 | - name: Set up Python 98 | uses: actions/setup-python@v4 99 | with: 100 | python-version: ${{ matrix.python-version }} 101 | architecture: x64 102 | 103 | - name: Install poetry 104 | uses: abatilo/actions-poetry@v2.0.0 105 | with: 106 | poetry-version: 1.5.1 107 | 108 | - name: Set up cache 109 | uses: actions/cache@v3 110 | with: 111 | path: ~/.cache/pypoetry/virtualenvs 112 | key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }} 113 | restore-keys: | 114 | ${{ runner.os }}-poetry- 115 | 116 | - name: Install base dependencies 117 | run: poetry run poetry install -E postgres-async 118 | 119 | - name: Install specific sqlalchemy version 120 | run: | 121 | poetry run pip install 'sqlalchemy~=${{ matrix.sqlalchemy-version }}' 122 | 123 | - run: SQLALCHEMY_WARN_20=1 poetry run coverage run -a -m pytest src tests -vv -m 'asyncio' 124 | 125 | - name: Store test result artifacts 126 | uses: 
class PostgresConfig(DockerContainerConfig):
    """Define the configuration object for postgres.

    Args:
        image (str): The docker image:tag specifier to use for postgres containers.
            Defaults to :code:`"postgres:9.6.10-alpine"`.
        host (str): The hostname under which a mounted port will be available.
            Defaults to :code:`"localhost"`.
        port (int): The port to bind the container to.
            Defaults to :code:`5532`.
        ci_port (int): The port to bind the container to when a CI environment is detected.
            Defaults to :code:`5432`.
        username (str): The username of the root postgres user
            Defaults to :code:`"user"`.
        password (str): The password of the root postgres password
            Defaults to :code:`"password"`.
        root_database (str): The name of the root postgres database to create.
            Defaults to :code:`"dev"`.
        drivername (str): The sqlalchemy driver to use
            Defaults to :code:`"postgresql+psycopg2"`.
    """

    name = "postgres"

    # The set of configuration attributes recognized for this container type.
    _fields: ClassVar[Iterable] = {
        "image",
        "host",
        "port",
        "ci_port",
        "username",
        "password",
        "root_database",
        "drivername",
    }
    _fields_defaults: ClassVar[dict] = {
        "image": "postgres:9.6.10-alpine",
        "port": 5532,
        "ci_port": 5432,
        "username": "user",
        "password": "password",
        "root_database": "dev",
        "drivername": None,
    }

    @fallback
    def username(self):
        raise NotImplementedError()

    @fallback
    def password(self):
        raise NotImplementedError()

    @fallback
    def root_database(self):
        raise NotImplementedError()

    @fallback
    def drivername(self):
        raise NotImplementedError()

    def ports(self):
        # Map the container-internal postgres port (5432) to the configured host port.
        return {5432: self.port}

    def environment(self):
        """Return the environment variables handed to the postgres docker container."""
        return {
            "POSTGRES_DB": self.root_database,
            "POSTGRES_USER": self.username,
            "POSTGRES_PASSWORD": self.password,
        }

    def check_fn(self):
        """Probe the configured host/port to verify postgres is accepting connections.

        Raises:
            ContainerCheckFailed: If a TCP connection cannot be established.
        """
        import socket

        # A raw TCP connect is sufficient; we only need to know the port is open.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.connect((self.host, int(self.port)))
        except OSError as e:
            # ConnectionRefusedError is a subclass of OSError, so a single
            # OSError clause covers every connection failure mode; chain the
            # original error for easier debugging.
            raise ContainerCheckFailed(
                f"Unable to connect to a presumed Postgres test container via given config: {self}"
            ) from e
        finally:
            s.close()
fixtures 102 | # will not work with psycopg2, so we instead swap the default to the preferred async driver. 103 | drivername = detect_driver(config.drivername, async_=async_) 104 | 105 | url = URL( 106 | drivername=drivername, 107 | host=config.host, 108 | port=config.port, 109 | username=config.username, 110 | password=config.password, 111 | database=database_name, 112 | ) 113 | 114 | if autocommit: 115 | engine_kwargs["isolation_level"] = "AUTOCOMMIT" 116 | 117 | if async_ or getattr(url.get_dialect(), "is_async", None): 118 | from sqlalchemy.ext.asyncio import create_async_engine 119 | 120 | engine = create_async_engine(url, **engine_kwargs) 121 | else: 122 | engine = sqlalchemy.create_engine(url, **engine_kwargs) 123 | 124 | return engine 125 | 126 | 127 | def detect_driver(drivername: Optional[str] = None, async_: bool = False) -> str: 128 | if drivername: 129 | return drivername 130 | 131 | sqlalchemy = Distribution.from_name(name="sqlalchemy") 132 | if sqlalchemy.version >= "2.0" and any(Distribution.discover(name="psycopg")): 133 | return "postgresql+psycopg" 134 | 135 | if async_: 136 | if any(Distribution.discover(name="asyncpg")): 137 | return "postgresql+asyncpg" 138 | else: 139 | if any(Distribution.discover(name="psycopg2")) or any( 140 | Distribution.discover(name="psycopg2-binary") 141 | ): 142 | return "postgresql+psycopg2" 143 | 144 | raise ValueError( # pragma: no cover 145 | "No suitable driver found for Postgres. Please install `psycopg`, `psycopg2`, " 146 | "`asyncpg`, or explicitly configure the `drivername=` field of `PostgresConfig`." 
147 | ) 148 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/hooks.py: -------------------------------------------------------------------------------- 1 | import os 2 | import warnings 3 | 4 | _resource_kinds = ["postgres", "redshift", "mongo", "redis", "mysql", "moto"] 5 | 6 | 7 | def pytest_addoption(parser): 8 | parser.addini( 9 | "pmr_multiprocess_safe", 10 | "Enables multiprocess-safe mode", 11 | type="bool", 12 | default=False, 13 | ) 14 | parser.addini( 15 | "pmr_cleanup_container", 16 | "Optionally disable attempts to cleanup created containers", 17 | type="bool", 18 | default=True, 19 | ) 20 | parser.addini( 21 | "pmr_docker_client", 22 | "Optional docker client name to use: docker, podman, nerdctl", 23 | type="string", 24 | default=None, 25 | ) 26 | 27 | group = parser.getgroup("collect") 28 | group.addoption( 29 | "--pmr-multiprocess-safe", 30 | action="store_true", 31 | default=False, 32 | help="Enable multiprocess-safe mode", 33 | dest="pmr_multiprocess_safe", 34 | ) 35 | group.addoption( 36 | "--pmr-cleanup-container", 37 | action="store_true", 38 | default=True, 39 | help="Optionally disable attempts to cleanup created containers", 40 | dest="pmr_cleanup_container", 41 | ) 42 | group.addoption( 43 | "--pmr-docker-client", 44 | default=None, 45 | help="Optional docker client name to use: docker, podman, nerdctl", 46 | dest="pmr_docker_client", 47 | ) 48 | 49 | 50 | def get_pytest_flag(config, name, *, default=None): 51 | value = getattr(config.option, name, default) 52 | if value: 53 | return value 54 | 55 | return config.getini(name) 56 | 57 | 58 | def use_multiprocess_safe_mode(config): 59 | return bool(get_pytest_flag(config, "pmr_multiprocess_safe")) 60 | 61 | 62 | def get_docker_client_name(config) -> str: 63 | pmr_docker_client = os.getenv("PMR_DOCKER_CLIENT") 64 | if pmr_docker_client: 65 | return pmr_docker_client 66 | 67 | docker_client = get_pytest_flag(config, 
"pmr_docker_client") 68 | if docker_client: 69 | return docker_client 70 | 71 | import shutil 72 | 73 | for client_name in ["docker", "podman", "nerdctl"]: 74 | if shutil.which(client_name): 75 | break 76 | else: 77 | client_name = "docker" 78 | 79 | config.option.pmr_docker_client = client_name 80 | return client_name 81 | 82 | 83 | def get_docker_client(config): 84 | from python_on_whales.docker_client import DockerClient 85 | 86 | client_name = get_docker_client_name(config) 87 | return DockerClient(client_call=[client_name]) 88 | 89 | 90 | def pytest_configure(config): 91 | """Register markers for each resource kind.""" 92 | for resource_kind in _resource_kinds: 93 | config.addinivalue_line( 94 | "markers", 95 | f"{resource_kind}: Tests which make use of {resource_kind} fixtures", 96 | ) 97 | 98 | config._pmr_containers = [] 99 | 100 | 101 | def pytest_itemcollected(item): 102 | """Attach markers to each test which uses a fixture of one of the resources.""" 103 | if not hasattr(item, "fixturenames"): 104 | return 105 | 106 | fixturenames = set(item.fixturenames) 107 | for resource_kind in _resource_kinds: 108 | resource_fixture = f"pmr_{resource_kind}_container" 109 | if resource_fixture in fixturenames: 110 | item.add_marker(resource_kind) 111 | 112 | 113 | def pytest_sessionfinish(session, exitstatus): 114 | config = session.config 115 | 116 | if not use_multiprocess_safe_mode(config): 117 | return 118 | 119 | # In the context of multiprocess pytest invocations like pytest-xdist, 120 | # `workerinput` will be `None` in only the root pytest call. 121 | workerinput = getattr(session.config, "workerinput", None) 122 | if workerinput is not None: 123 | return 124 | 125 | # docker-based fixtures should be optional based on the selected extras. 126 | try: 127 | docker = get_docker_client(config) 128 | except ImportError: 129 | return 130 | 131 | # We ought to avoid performing deep imports here, this file is auto-loaded 132 | # by pytest plugin machinery. 
133 | from pytest_mock_resources.container.base import get_tmp_root, load_container_lockfile 134 | 135 | # Kind of a neat side-effect of using the below lock file is that if past 136 | # PMR runs failed to clean up their container, subsequent runs. Ironically 137 | # this might also lead to literal concurrent runs of unrelated PMR-enabled 138 | # pytest runs to clobber one another...:shrug:. 139 | roots = [get_tmp_root(session.config), get_tmp_root(session.config, parent=True)] 140 | for fn in roots: 141 | with load_container_lockfile(fn) as containers: 142 | if not containers: 143 | continue 144 | 145 | while containers: 146 | container_id = containers.pop(0) 147 | 148 | try: 149 | container = docker.container.inspect(container_id) 150 | except Exception: 151 | warnings.warn( 152 | f"Unrecognized container {container_id}. You may need to manually delete/edit {fn}" 153 | ) 154 | else: 155 | try: 156 | container.kill() 157 | except Exception: 158 | warnings.warn(f"Failed to kill container {container_id}") 159 | 160 | fn.unlink() 161 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/moto/base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import time 4 | from dataclasses import dataclass 5 | 6 | import pytest 7 | 8 | from pytest_mock_resources.action import validate_actions 9 | from pytest_mock_resources.compat import boto3 10 | from pytest_mock_resources.container.base import get_container 11 | from pytest_mock_resources.container.moto import endpoint_url, MotoConfig 12 | from pytest_mock_resources.fixture.base import Scope 13 | from pytest_mock_resources.fixture.moto.action import apply_ordered_actions, MotoAction 14 | 15 | 16 | @pytest.fixture(scope="session") 17 | def pmr_moto_config(): 18 | """Override this fixture with a :class:`MotoConfig` instance to specify different defaults. 
19 | 20 | Examples: 21 | >>> @pytest.fixture(scope='session') 22 | ... def pmr_moto_config(): 23 | ... return MotoConfig(image="motoserver/moto:latest") 24 | """ 25 | return MotoConfig() 26 | 27 | 28 | @pytest.fixture(scope="session") 29 | def pmr_moto_container(pytestconfig, pmr_moto_config): 30 | yield from get_container(pytestconfig, pmr_moto_config) 31 | 32 | 33 | def create_moto_fixture( 34 | *ordered_actions: MotoAction, 35 | region_name: str = "us-east-1", 36 | scope: Scope = "function", 37 | ): 38 | """Produce a Moto fixture. 39 | 40 | Any number of fixture functions can be created. Under the hood they will all share the same 41 | moto server. 42 | 43 | .. note:: 44 | 45 | Each test executes using a different (fake) AWS account through moto. If you create 46 | boto3 ``client``/``resource`` objects outside of the one handed to the test (for example, 47 | in the code under test), they should be sure to use the ``aws_access_key_id``, 48 | ``aws_secret_access_key``, ``aws_session_token``, and ``endpoint_url`` given by the 49 | ``.pmr_credentials`` attribute. 50 | 51 | .. note:: 52 | 53 | A moto dashboard should be available for debugging while the container is running. 54 | By default it would be available at ``http://localhost:5555/moto-api/#`` (but 55 | the exact URL may be different depending on your host/port config. 56 | 57 | Args: 58 | ordered_actions: Any number of ordered actions to be run on test setup. 59 | region_name (str): The name of the AWS region to use, defaults to "us-east-1". 60 | scope (str): The scope of the fixture can be specified by the user, defaults to "function". 
61 | """ 62 | validate_actions(ordered_actions, fixture="moto") 63 | 64 | @pytest.fixture(scope=scope) 65 | def _fixture(pmr_moto_container, pmr_moto_config) -> Session: 66 | url = endpoint_url(pmr_moto_config) 67 | credentials = Credentials.from_endpoint_url(url, region_name=region_name) 68 | 69 | session = Session( 70 | boto3.Session( 71 | aws_access_key_id=credentials.aws_access_key_id, 72 | aws_secret_access_key=credentials.aws_secret_access_key, 73 | aws_session_token=credentials.aws_session_token, 74 | region_name=region_name, 75 | ), 76 | endpoint_url=credentials.endpoint_url, 77 | pmr_credentials=credentials, 78 | ) 79 | apply_ordered_actions(session, ordered_actions) 80 | return session 81 | 82 | return _fixture 83 | 84 | 85 | @dataclass 86 | class Credentials: 87 | aws_access_key_id: str 88 | aws_secret_access_key: str 89 | aws_session_token: str 90 | endpoint_url: str 91 | region_name: str = "us-east-1" 92 | 93 | @classmethod 94 | def from_endpoint_url( 95 | cls, url: str, account_id: str | None = None, region_name: str = "us-east-1" 96 | ): 97 | if account_id is None: 98 | # Attempt at a cross-process way of generating unique 12-character integers. 
99 | account_id = str(time.time_ns())[:12] 100 | 101 | sts = boto3.client( 102 | "sts", 103 | endpoint_url=url, 104 | aws_access_key_id="test", 105 | aws_secret_access_key="test", # noqa: S106 106 | region_name=region_name, 107 | ) 108 | response = sts.assume_role( 109 | RoleArn=f"arn:aws:iam::{account_id}:role/my-role", 110 | RoleSessionName="test-session-name", 111 | ExternalId="test-external-id", 112 | ) 113 | 114 | return cls( 115 | aws_access_key_id=response["Credentials"]["AccessKeyId"], 116 | aws_secret_access_key=response["Credentials"]["SecretAccessKey"], 117 | aws_session_token=response["Credentials"]["SessionToken"], 118 | endpoint_url=url, 119 | region_name=region_name, 120 | ) 121 | 122 | def as_kwargs(self): 123 | return { 124 | "aws_access_key_id": self.aws_access_key_id, 125 | "aws_secret_access_key": self.aws_secret_access_key, 126 | "aws_session_token": self.aws_session_token, 127 | "endpoint_url": self.endpoint_url, 128 | "region_name": self.region_name, 129 | } 130 | 131 | 132 | @dataclass 133 | class Session: 134 | """Wrap the vanilla boto3 Session object, automatically inserting the endpoint_url field.""" 135 | 136 | session: boto3.Session 137 | endpoint_url: str 138 | pmr_credentials: Credentials 139 | 140 | def client(self, service_name, **kwargs): 141 | return self.session.client(service_name, endpoint_url=self.endpoint_url, **kwargs) 142 | 143 | def resource(self, service_name, **kwargs): 144 | return self.session.resource(service_name, endpoint_url=self.endpoint_url, **kwargs) 145 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![CircleCI](https://img.shields.io/circleci/build/gh/schireson/pytest-mock-resources/master) 2 | [![codecov](https://codecov.io/gh/schireson/pytest-mock-resources/branch/master/graph/badge.svg)](https://codecov.io/gh/schireson/pytest-mock-resources) 3 | [![Documentation 4 | 
Status](https://readthedocs.org/projects/pytest-mock-resources/badge/?version=latest)](https://pytest-mock-resources.readthedocs.io/en/latest/?badge=latest) 5 | 6 | ## Introduction 7 | 8 | Code which depends on external resources such as databases (postgres, redshift, etc) can be difficult 9 | to write automated tests for. Conventional wisdom might be to mock or stub out the actual database 10 | calls and assert that the code works correctly before/after the calls. 11 | 12 | However take the following, _simple_ example: 13 | 14 | ```python 15 | def serialize(users): 16 | return [ 17 | { 18 | 'user': user.serialize(), 19 | 'address': user.address.serialize(), 20 | 'purchases': [p.serialize() for p in user.purchases], 21 | } 22 | for user in users 23 | ] 24 | 25 | def view_function(session): 26 | users = session.query(User).join(Address).options(selectinload(User.purchases)).all() 27 | return serialize(users) 28 | ``` 29 | 30 | Sure, you can test `serialize`, but whether the actual **query** did the correct thing _truly_ 31 | requires that you execute the query. 32 | 33 | ## The Pitch 34 | 35 | Having tests depend upon a **real** postgres instance running somewhere is a pain, very fragile, and 36 | prone to issues across machines and test failures. 37 | 38 | Therefore `pytest-mock-resources` (primarily) works by managing the lifecycle of docker containers 39 | and providing access to them inside your tests. 40 | 41 | As such, this package makes 2 primary assumptions: 42 | 43 | - You're using `pytest` (hopefully that's appropriate, given the package name) 44 | - For many resources, `docker` is required to be available and running (or accessible through remote 45 | docker). 46 | 47 | If you aren't familiar with Pytest Fixtures, you can read up on them in the [Pytest 48 | documentation](https://docs.pytest.org/en/latest/fixture.html). 
49 | 50 | In the above example, your test file could look something like 51 | 52 | ```python 53 | from pytest_mock_resources import create_postgres_fixture 54 | from models import ModelBase 55 | 56 | pg = create_postgres_fixture(ModelBase, session=True) 57 | 58 | def test_view_function_empty_db(pg): 59 | response = view_function(pg) 60 | assert response == ... 61 | 62 | def test_view_function_user_without_purchases(pg): 63 | pg.add(User(...)) 64 | pg.flush() 65 | 66 | response = view_function(pg) 67 | assert response == ... 68 | 69 | def test_view_function_user_with_purchases(pg): 70 | pg.add(User(..., purchases=[Purchase(...)])) 71 | pg.flush() 72 | 73 | response = view_function(pg) 74 | assert response == ... 75 | ``` 76 | 77 | ## Existing Resources (many more possible) 78 | 79 | - SQLite 80 | 81 | ```python 82 | from pytest_mock_resources import create_sqlite_fixture 83 | ``` 84 | 85 | - Postgres 86 | 87 | ```python 88 | from pytest_mock_resources import create_postgres_fixture 89 | ``` 90 | 91 | - Redshift 92 | 93 | **note** Uses postgres under the hood, but the fixture tries to support as much redshift 94 | functionality as possible (including redshift's `COPY`/`UNLOAD` commands). 
95 | 96 | ```python 97 | from pytest_mock_resources import create_redshift_fixture 98 | ``` 99 | 100 | - Mongo 101 | 102 | ```python 103 | from pytest_mock_resources import create_mongo_fixture 104 | ``` 105 | 106 | - Redis 107 | 108 | ```python 109 | from pytest_mock_resources import create_redis_fixture 110 | ``` 111 | 112 | - MySQL 113 | 114 | ```python 115 | from pytest_mock_resources import create_mysql_fixture 116 | ``` 117 | 118 | - Moto 119 | 120 | ```python 121 | from pytest_mock_resources import create_moto_fixture 122 | ``` 123 | 124 | ## Features 125 | 126 | General features include: 127 | 128 | - Support for "actions" which pre-populate the resource you're mocking before the test 129 | - [Async fixtures](https://pytest-mock-resources.readthedocs.io/en/latest/async.html) 130 | - Custom configuration for container/resource startup 131 | 132 | ## Installation 133 | 134 | ```bash 135 | # Basic fixture support i.e. SQLite 136 | pip install "pytest-mock-resources" 137 | 138 | # General, docker-based fixture support 139 | pip install "pytest-mock-resources[docker]" 140 | 141 | # Mongo fixture support, installs `pymongo` 142 | pip install "pytest-mock-resources[mongo]" 143 | 144 | # Moto fixture support, installs non-driver extras specific to moto support 145 | pip install "pytest-mock-resources[moto]" 146 | 147 | # Redis fixture support, Installs `redis` client 148 | pip install "pytest-mock-resources[redis]" 149 | 150 | # Redshift fixture support, installs non-driver extras specific to redshift support 151 | pip install "pytest-mock-resources[redshift]" 152 | ``` 153 | 154 | Additionally there are a number of **convenience** extras currently provided 155 | for installing drivers/clients of specific features. However in most cases, 156 | you **should** already be installing the driver/client used for that fixture 157 | as a first-party dependency of your project. 
158 | 159 | As such, we recommend against using these extras, and instead explicitly depending 160 | on the package in question in your own project's 1st party dependencies. 161 | 162 | ```bash 163 | # Installs psycopg2/psycopg2-binary driver 164 | pip install "pytest-mock-resources[postgres-binary]" 165 | pip install "pytest-mock-resources[postgres]" 166 | 167 | # Installs asyncpg driver 168 | pip install "pytest-mock-resources[postgres-async]" 169 | 170 | # Installs pymysql driver 171 | pip install "pytest-mock-resources[mysql]" 172 | ``` 173 | 174 | ## Possible Future Resources 175 | 176 | - Rabbit Broker 177 | - AWS Presto 178 | 179 | Feel free to file an [issue](https://github.com/schireson/pytest-mock-resources/issues) if you find 180 | any bugs or want to start a conversation around a mock resource you want implemented! 181 | 182 | ## Python 2 183 | 184 | Releases in the 1.x series were supportive of python 2. However starting from 2.0.0, support for 185 | python 2 was dropped. We may accept bugfix PRs for the 1.x series, however new development and 186 | features will not be backported. 
187 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/fixture/redshift/udf.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | from sqlalchemy import text 4 | 5 | from pytest_mock_resources.sqlalchemy import Statements 6 | 7 | 8 | @enum.unique 9 | class UdfLanguage(enum.Enum): 10 | PLPGSQL = "plpgsql" 11 | PLPYTHON = "plpythonu" 12 | SQL = "SQL" 13 | 14 | 15 | def create_udf(name, args, returns, body, language, schema="public"): 16 | _template = """ 17 | CREATE FUNCTION {schema}.{name} ({args}) RETURNS {returns} AS $${body}$$ LANGUAGE {language}; 18 | """ 19 | 20 | return text( 21 | _template.format( 22 | schema=schema, 23 | name=name, 24 | args=args, 25 | returns=returns, 26 | body=body, 27 | language=language, 28 | ) 29 | ) 30 | 31 | 32 | left_integer = create_udf( 33 | name="LEFT", 34 | args="s1 INTEGER, s2 INTEGER", 35 | returns="INTEGER", 36 | body="SELECT LEFT(s1::TEXT, s2)::INTEGER", 37 | language=UdfLanguage.SQL.value, 38 | ) 39 | 40 | right_integer = create_udf( 41 | name="RIGHT", 42 | args="s1 INTEGER, s2 INTEGER", 43 | returns="INTEGER", 44 | body="SELECT RIGHT(s1::TEXT, s2)::INTEGER", 45 | language=UdfLanguage.SQL.value, 46 | ) 47 | 48 | dateadd_kwargs = { 49 | "body": "SELECT d + (n::VARCHAR || i)::INTERVAL", 50 | "language": UdfLanguage.SQL.value, 51 | } 52 | 53 | dateadd_date = create_udf( 54 | name="DATEADD", 55 | args="i VARCHAR, n INTEGER, d DATE", 56 | returns="TIMESTAMP WITHOUT TIME ZONE", 57 | **dateadd_kwargs, 58 | ) 59 | 60 | dateadd_timestamp = create_udf( 61 | name="DATEADD", 62 | args="i VARCHAR, n INTEGER, d TIMESTAMP WITHOUT TIME ZONE", 63 | returns="TIMESTAMP WITHOUT TIME ZONE", 64 | **dateadd_kwargs, 65 | ) 66 | 67 | dateadd_timestamptz = create_udf( 68 | name="DATEADD", 69 | args="i VARCHAR, n INTEGER, d TIMESTAMP WITH TIME ZONE", 70 | returns="TIMESTAMP WITH TIME ZONE", 71 | **dateadd_kwargs, 72 | ) 73 | 74 | 
date_add_date = create_udf( 75 | name="DATE_ADD", 76 | args="i VARCHAR, n INTEGER, d DATE", 77 | returns="TIMESTAMP WITHOUT TIME ZONE", 78 | **dateadd_kwargs, 79 | ) 80 | 81 | date_add_timestamp = create_udf( 82 | name="DATE_ADD", 83 | args="i VARCHAR, n INTEGER, d TIMESTAMP WITHOUT TIME ZONE", 84 | returns="TIMESTAMP WITHOUT TIME ZONE", 85 | **dateadd_kwargs, 86 | ) 87 | 88 | date_add_timestamptz = create_udf( 89 | name="DATE_ADD", 90 | args="i VARCHAR, n INTEGER, d TIMESTAMP WITH TIME ZONE", 91 | returns="TIMESTAMP WITH TIME ZONE", 92 | **dateadd_kwargs, 93 | ) 94 | 95 | len_varchar = create_udf( 96 | name="LEN", 97 | args="s VARCHAR", 98 | returns="INTEGER", 99 | body="SELECT LENGTH(s)", 100 | language=UdfLanguage.SQL.value, 101 | ) 102 | 103 | convert_timezone = create_udf( 104 | name="CONVERT_TIMEZONE", 105 | args="source_tz VARCHAR, target_tz VARCHAR, ts TIMESTAMP", 106 | returns="TIMESTAMP", 107 | body="SELECT ts AT TIME ZONE source_tz AT TIME ZONE target_tz", 108 | language=UdfLanguage.SQL.value, 109 | ) 110 | 111 | convert_timezone_no_source = create_udf( 112 | name="CONVERT_TIMEZONE", 113 | args="target_tz VARCHAR, ts TIMESTAMP", 114 | returns="TIMESTAMP", 115 | body="SELECT ts AT TIME ZONE 'UTC' AT TIME ZONE target_tz", 116 | language=UdfLanguage.SQL.value, 117 | ) 118 | 119 | 120 | datediff_kwargs = { 121 | "returns": "BIGINT", 122 | # Credit: https://gist.github.com/JoshuaGross/18b9bb1db8021efc88884cbd8dc8fddb 123 | "body": """ 124 | DECLARE 125 | diff_interval INTERVAL; 126 | diff INT = 0; 127 | years_diff INT = 0; 128 | 129 | BEGIN 130 | IF units NOT IN ( 131 | 'y', 'yr', 'yrs', 'year', 'years', 132 | 'month', 'months', 'mon', 'mons', 133 | 'week', 'weeks', 'w', 134 | 'day', 'days', 'd', 135 | 'hour', 'hours', 'h', 'hr', 'hrs', 136 | 'minute', 'minutes', 'm', 'min', 'mins', 137 | 'second', 'seconds' 138 | ) THEN 139 | RAISE EXCEPTION 'Invalid unit % specified', units; 140 | END IF; 141 | 142 | IF units IN ( 143 | 'y', 'yr', 'yrs', 'year', 'years', 
144 | 'month', 'months', 'mon', 'mons' 145 | ) THEN 146 | years_diff = DATE_PART('year', end_t) - DATE_PART('year', start_t); 147 | 148 | IF units IN ('y', 'yr', 'yrs', 'year', 'years') THEN 149 | RETURN years_diff::BIGINT; 150 | ELSE 151 | RETURN ( 152 | years_diff * 12 + (DATE_PART('month', end_t) - DATE_PART('month', start_t)) 153 | )::BIGINT; 154 | END IF; 155 | END IF; 156 | 157 | IF pg_typeof(start_t) = pg_typeof(DATE('2000-01-01')) THEN 158 | diff_interval = (end_t - start_t) * '1 day'::INTERVAL; 159 | ELSE 160 | diff_interval = (end_t - start_t); 161 | END IF; 162 | 163 | diff = diff + DATE_PART('day', diff_interval); 164 | 165 | IF units IN ('week', 'weeks', 'w') THEN 166 | diff = diff/7; 167 | RETURN diff::BIGINT; 168 | END IF; 169 | 170 | IF units IN ('day', 'days', 'd') THEN 171 | RETURN diff::BIGINT; 172 | END IF; 173 | 174 | diff = diff * 24 + DATE_PART('hour', diff_interval); 175 | 176 | IF units IN ('hour', 'hours', 'h', 'hr', 'hrs') THEN 177 | RETURN diff::BIGINT; 178 | END IF; 179 | 180 | diff = diff * 60 + DATE_PART('minute', diff_interval); 181 | 182 | IF units IN ('minute', 'minutes', 'm', 'min', 'mins') THEN 183 | RETURN diff::BIGINT; 184 | END IF; 185 | 186 | diff = diff * 60 + DATE_PART('second', diff_interval); 187 | 188 | RETURN diff::BIGINT; 189 | 190 | END; 191 | """, 192 | "language": UdfLanguage.PLPGSQL.value, 193 | } 194 | 195 | datediff_date = create_udf( 196 | name="DATEDIFF", args="units VARCHAR, start_t DATE, end_t DATE", **datediff_kwargs 197 | ) 198 | 199 | datediff_timestamp = create_udf( 200 | name="DATEDIFF", 201 | args="units VARCHAR, start_t TIMESTAMP, end_t TIMESTAMP", 202 | **datediff_kwargs, 203 | ) 204 | 205 | datediff_timestamptz = create_udf( 206 | name="DATEDIFF", 207 | args="units VARCHAR, start_t TIMESTAMP WITH TIME ZONE, end_t TIMESTAMP WITH TIME ZONE", 208 | **datediff_kwargs, 209 | ) 210 | 211 | REDSHIFT_UDFS = Statements( 212 | dateadd_date, 213 | dateadd_timestamp, 214 | dateadd_timestamptz, 215 | 
date_add_date, 216 | date_add_timestamp, 217 | date_add_timestamptz, 218 | datediff_date, 219 | datediff_timestamp, 220 | datediff_timestamptz, 221 | left_integer, 222 | right_integer, 223 | len_varchar, 224 | convert_timezone, 225 | convert_timezone_no_source, 226 | ) 227 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/patch/redshift/mock_s3_copy.py: -------------------------------------------------------------------------------- 1 | import binascii 2 | import csv 3 | import gzip 4 | import io 5 | from dataclasses import dataclass 6 | from typing import List, Optional 7 | 8 | from pytest_mock_resources.compat import boto3 9 | 10 | 11 | @dataclass 12 | class S3CopyCommand: 13 | table_name: str 14 | delimiter: str 15 | s3_uri: str 16 | empty_as_null: bool 17 | aws_access_key_id: str 18 | aws_secret_access_key: str 19 | columns: List[str] 20 | format: str = "CSV" 21 | schema_name: Optional[str] = None 22 | 23 | 24 | def mock_s3_copy_command(statement, cursor): 25 | copy_command = _parse_s3_command(statement) 26 | return _mock_s3_copy(cursor, copy_command) 27 | 28 | 29 | def _parse_s3_command(statement): 30 | """Format, Parse and call patched 'COPY' command.""" 31 | statement = strip(statement) 32 | params = {} 33 | 34 | # deleting copy 35 | tokens = statement.split()[1:] 36 | 37 | # Fetching table name 38 | params["schema_name"], params["table_name"] = _split_table_name(tokens.pop(0)) 39 | 40 | # Checking for columns 41 | params["columns"] = [] 42 | if tokens[0][0] == "(": 43 | ending_index = 0 44 | for index, arg in enumerate(tokens): 45 | if arg.endswith(")"): 46 | ending_index = index 47 | break 48 | 49 | ending_index += 1 50 | columns = tokens[0:ending_index] 51 | columns[0] = columns[0].replace("(", "") 52 | columns[-1] = columns[-1].replace(")", "") 53 | columns = [x.replace(",", "") for x in columns] 54 | columns = [x for x in columns if x != ""] 55 | tokens = tokens[ending_index:] 56 | 
params["columns"] = columns 57 | 58 | # Fetching s3_uri 59 | if tokens.pop(0).lower() != "from": 60 | raise ValueError( 61 | "Possibly malformed S3 URI Format. " 62 | f"Statement = {statement}" 63 | "Redshift fixture only supports S3 Copy statements with the following syntax: " 64 | "COPY FROM [(column 1, [column2, [..]])] '' " 65 | "credentials 'aws_access_key_id=;" 66 | "aws_secret_access_key='" 67 | ) 68 | params["s3_uri"] = strip(tokens.pop(0)) 69 | empty_as_null = False 70 | delimiter = None 71 | # Fetching credentials 72 | for token in tokens: 73 | if "aws_access_key_id" in token.lower() or "aws_secret_access_key" in token.lower(): 74 | # This is because of the following possibiliteis: 75 | # ... [with ]credentials[ AS] 'aws_access_key_id=x;aws_secret_access_key=y' 76 | # OR 77 | # ... [with ]credentials[ AS] 'aws_secret_access_key=y;aws_access_key_id=x' 78 | # OR 79 | # ... [with ]credentials[ AS] 'aws_secret_access_key=y;\naws_access_key_id=x' 80 | # OR 81 | # ... [with ]credentials[ AS] 'aws_secret_access_key=y; aws_access_key_id=x' 82 | # Supportred AWS credentials format: 83 | # [with ]credentials[ AS] 'aws_secret_access_key=y; aws_access_key_id=x' 84 | # No Support for additional credential formats, eg IAM roles, etc, yet. 85 | credentials_list = token.split(";") 86 | for credentials in credentials_list: 87 | if "aws_access_key_id" in credentials: 88 | params["aws_access_key_id"] = credentials.split("=")[-1] 89 | elif "aws_secret_access_key" in credentials: 90 | params["aws_secret_access_key"] = credentials.split("=")[-1] 91 | else: 92 | raise ValueError( 93 | "Possibly malformed AWS Credentials Format. 
" 94 | f"Statement = {statement}" 95 | "Redshift fixture only supports S3 Copy statements with the following " 96 | "syntax: COPY FROM [(column 1, [column2, [..]])] '" 97 | "' " 98 | "credentials 'aws_access_key_id=;" 99 | "aws_secret_access_key=' " 100 | "Supportred AWS credentials format: " 101 | "[with ]credentials[ AS] 'aws_secret_access_key=y; aws_access_key_id=x'" 102 | " No Support for additional credential formats, eg IAM roles, etc, yet." 103 | ) 104 | if "emptyasnull" == token.lower(): 105 | empty_as_null = True 106 | if "csv" == token.lower(): 107 | delimiter = "," 108 | 109 | if delimiter is None: 110 | delimiter = "|" 111 | return S3CopyCommand(**params, empty_as_null=empty_as_null, delimiter=delimiter) 112 | 113 | 114 | def _split_table_name(table_name): 115 | """Split 'schema_name.table_name' to (schema_name, table_name).""" 116 | table_name_items = table_name.split(".") 117 | if len(table_name_items) == 1: 118 | schema_name = None 119 | elif len(table_name_items) == 2: 120 | schema_name, table_name = table_name_items 121 | else: 122 | raise ValueError(f"Cannot determine schema/table name from input {table_name}") 123 | return schema_name, table_name 124 | 125 | 126 | def _mock_s3_copy( 127 | cursor, 128 | copy_command, 129 | ): 130 | """Execute patched 'copy' command.""" 131 | s3 = boto3.client( 132 | "s3", 133 | aws_access_key_id=copy_command.aws_access_key_id, 134 | aws_secret_access_key=copy_command.aws_secret_access_key, 135 | ) 136 | ending_index = len(copy_command.s3_uri) 137 | path_to_file = copy_command.s3_uri[5:ending_index] 138 | bucket, key = path_to_file.split("/", 1) 139 | response = s3.get_object(Bucket=bucket, Key=key) 140 | 141 | # the following lins of code is used to check if the file is gzipped or not. 142 | # To do so we use magic numbers. 143 | # A mgic number is a constant numerical or text value used to identify a file format or protocol 144 | # The magic number for gzip compressed files is 1f 8b. 
145 | is_gzipped = binascii.hexlify(response["Body"].read(2)) == b"1f8b" 146 | 147 | response = s3.get_object(Bucket=bucket, Key=key) 148 | data = get_raw_file(response["Body"].read(), is_gzipped) 149 | 150 | cursor.copy_expert( 151 | "COPY {cc.table_name} FROM STDIN WITH DELIMITER AS '{cc.delimiter}' {cc.format} HEADER {non_null_clause}".format( 152 | cc=copy_command, 153 | non_null_clause=("FORCE NOT NULL " + ", ".join(copy_command.columns)) 154 | if copy_command.columns 155 | else "", 156 | ), 157 | data, 158 | ) 159 | 160 | 161 | def get_raw_file(file, is_gzipped=False): 162 | buffer = io.BytesIO(file) 163 | if is_gzipped: 164 | buffer = gzip.GzipFile(fileobj=buffer, mode="rb") 165 | return buffer 166 | 167 | 168 | def read_data_csv(file, is_gzipped=False, columns=None, delimiter="|"): 169 | buffer = get_raw_file(file, is_gzipped=is_gzipped) 170 | wrapper = io.TextIOWrapper(buffer) 171 | reader = csv.DictReader( 172 | wrapper, 173 | delimiter=delimiter, 174 | quoting=csv.QUOTE_MINIMAL, 175 | quotechar='"', 176 | lineterminator="\n", 177 | skipinitialspace=True, 178 | doublequote=True, 179 | ) 180 | return [dict(row) for row in reader] 181 | 182 | 183 | def strip(input_string): 184 | """Strip trailing whitespace, single/double quotes.""" 185 | return input_string.strip().rstrip(";").strip('"').strip("'") 186 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/patch/redshift/mock_s3_unload.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import gzip 3 | import io 4 | 5 | from pytest_mock_resources.compat import boto3 6 | from pytest_mock_resources.patch.redshift.mock_s3_copy import strip 7 | 8 | 9 | def mock_s3_unload_command(statement, cursor): 10 | params = _parse_s3_command(statement) 11 | 12 | return _mock_s3_unload( 13 | select_statement=params["select_statement"], 14 | s3_uri=params["s3_uri"], 15 | 
aws_secret_access_key=params["aws_secret_access_key"], 16 | aws_access_key_id=params["aws_access_key_id"], 17 | cursor=cursor, 18 | delimiter=params.get("delimiter", "|"), 19 | is_gzipped=params["gzip"], 20 | ) 21 | 22 | 23 | def _parse_s3_command(statement): 24 | """Format and Parse 'UNLOAD' command.""" 25 | statement = strip(statement) 26 | params = {} 27 | 28 | # deleting 'unload' 29 | tokens = statement.split()[1:] 30 | 31 | # Fetching select statement 32 | select_statement = "" 33 | error_flag = False 34 | for index, token in enumerate(tokens): 35 | if token.lower() == "to": 36 | tokens = tokens[index:] 37 | break 38 | select_statement += " " + token 39 | params["select_statement"] = select_statement 40 | if error_flag: 41 | raise ValueError( 42 | "Possibly malformed SELECT Statement. " 43 | f"Statement = {statement}" 44 | "Redshift fixture only supports S3 Unload statements with the following syntax: " 45 | "UNLOAD ('select-statement') TO 's3://object-path/name-prefix'" 46 | "authorization 'aws_access_key_id=;" 47 | "aws_secret_access_key='" 48 | "[GZIP] [DELIMITER [ AS ] 'delimiter-char']" 49 | ) 50 | 51 | # Fetching s3_uri 52 | if tokens.pop(0).lower() != "to": 53 | raise ValueError( 54 | "Possibly malformed S3 URI Format. " 55 | f"Statement = {statement}" 56 | "Redshift fixture only supports S3 Unload statements with the following syntax: " 57 | "UNLOAD ('select-statement') TO 's3://object-path/name-prefix'" 58 | "authorization 'aws_access_key_id=;" 59 | "aws_secret_access_key='" 60 | "[GZIP] [DELIMITER [ AS ] 'delimiter-char']" 61 | ) 62 | params["s3_uri"] = strip(tokens.pop(0)) 63 | 64 | # Fetching authorization 65 | for token in tokens: 66 | if "aws_access_key_id" in token.lower() or "aws_secret_access_key" in token.lower(): 67 | # This is because of the following possibiliteis: 68 | # ... [with ]authorization[ AS] 'aws_access_key_id=x;aws_secret_access_key=y' 69 | # OR 70 | # ... 
[with ]authorization[ AS] 'aws_secret_access_key=y;aws_access_key_id=x' 71 | # OR 72 | # ... [with ]authorization[ AS] 'aws_secret_access_key=y;\naws_access_key_id=x' 73 | # OR 74 | # ... [with ]authorization[ AS] 'aws_secret_access_key=y; aws_access_key_id=x' 75 | # Supportred AWS authorization format: 76 | # [with ]authorization[ AS] 'aws_secret_access_key=y; aws_access_key_id=x' 77 | # No Support for additional credential formats, eg IAM roles, etc, yet. 78 | credentials_list = token.split(";") 79 | for credentials in credentials_list: 80 | if "aws_access_key_id" in credentials: 81 | params["aws_access_key_id"] = credentials.split("=")[-1] 82 | elif "aws_secret_access_key" in credentials: 83 | params["aws_secret_access_key"] = credentials.split("=")[-1] 84 | else: 85 | raise ValueError( 86 | "Possibly malformed AWS Credentials Format. " 87 | f"Statement = {statement}" 88 | "Redshift fixture only supports S3 Copy statements with the following " 89 | "syntax: COPY FROM [(column 1, [column2, [..]])] '" 90 | "' " 91 | "credentials 'aws_access_key_id=;" 92 | "aws_secret_access_key=' " 93 | "Supportred AWS credentials format: " 94 | "[with ]credentials[ AS] 'aws_secret_access_key=y; aws_access_key_id=x'" 95 | " No Support for additional credential formats, eg IAM roles, etc, yet." 96 | ) 97 | 98 | # Fetching GZIP Flag 99 | params["gzip"] = False 100 | for token in tokens: 101 | if strip(token.lower()) == "gzip": 102 | params["gzip"] = True 103 | 104 | # Fetching delimiter 105 | for index, token in enumerate(tokens): 106 | if token.lower() == "delimiter": 107 | try: 108 | if tokens[index + 1].lower() != "as": 109 | params["delimiter"] = strip(tokens[index + 1]) 110 | else: 111 | params["delimiter"] = strip(tokens[index + 2]) 112 | except IndexError: 113 | raise ValueError( 114 | "Possibly malformed Delimiter Format. 
" 115 | f"Statement = {statement}" 116 | "Redshift fixture only supports S3 Unload statements with the following" 117 | "syntax: UNLOAD ('select-statement') TO 's3://object-path/name-prefix'" 118 | "authorization 'aws_access_key_id=;" 119 | "aws_secret_access_key='" 120 | "[GZIP] [DELIMITER [ AS ] 'delimiter-char']" 121 | ) 122 | return params 123 | 124 | 125 | def _mock_s3_unload( 126 | select_statement, 127 | s3_uri, 128 | aws_secret_access_key, 129 | aws_access_key_id, 130 | cursor, 131 | delimiter, 132 | is_gzipped, 133 | ): 134 | """Execute patched 'unload' command.""" 135 | # Parsing s3 uri 136 | ending_index = len(s3_uri) 137 | path_to_file = s3_uri[5:ending_index] 138 | bucket, key = path_to_file.split("/", 1) 139 | 140 | cursor.execute(select_statement) 141 | result = cursor.fetchall() 142 | column_names = [desc[0] for desc in cursor.description] 143 | buffer = get_data_csv( 144 | result, column_names=column_names, is_gzipped=is_gzipped, delimiter=delimiter 145 | ) 146 | 147 | # Push the data to the S3 Bucket. 
148 | conn = boto3.resource( 149 | "s3", aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key 150 | ) 151 | conn.create_bucket(Bucket=bucket) 152 | obj = conn.Object(bucket, key) 153 | obj.put(Body=buffer) 154 | 155 | 156 | def get_data_csv(rows, column_names, is_gzipped=False, delimiter="|", **additional_to_csv_options): 157 | result = io.BytesIO() 158 | buffer = result 159 | if is_gzipped: 160 | buffer = gzip.GzipFile(fileobj=buffer, mode="wb") 161 | 162 | wrapper = io.TextIOWrapper(buffer) 163 | 164 | writer = csv.DictWriter( 165 | wrapper, 166 | fieldnames=column_names, 167 | delimiter=delimiter, 168 | quoting=csv.QUOTE_MINIMAL, 169 | quotechar='"', 170 | lineterminator="\n", 171 | skipinitialspace=True, 172 | doublequote=True, 173 | ) 174 | writer.writeheader() 175 | for row in rows: 176 | writer.writerow(dict(zip(column_names, row))) 177 | 178 | wrapper.detach() 179 | 180 | if is_gzipped: 181 | buffer.close() 182 | 183 | result.seek(0) 184 | return result 185 | -------------------------------------------------------------------------------- /src/pytest_mock_resources/container/base.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import contextlib 4 | import json 5 | import pathlib 6 | import socket 7 | import time 8 | import types 9 | from typing import Awaitable, Callable, TYPE_CHECKING, TypeVar 10 | 11 | from pytest_mock_resources.hooks import ( 12 | get_docker_client, 13 | get_pytest_flag, 14 | use_multiprocess_safe_mode, 15 | ) 16 | 17 | try: 18 | import responses as _responses 19 | 20 | responses: types.ModuleType | None = _responses 21 | del _responses 22 | except ImportError: 23 | responses = None 24 | 25 | if TYPE_CHECKING: 26 | from python_on_whales.docker_client import DockerClient 27 | 28 | 29 | DEFAULT_RETRIES = 40 30 | DEFAULT_INTERVAL = 0.5 31 | 32 | 33 | class ContainerCheckFailed(Exception): # noqa: N818 34 | """Unable to connect to 
a Container.""" 35 | 36 | 37 | T = TypeVar("T") 38 | 39 | 40 | async def async_retry( 41 | func: Callable[..., Awaitable[T]], 42 | *, 43 | args=(), 44 | kwargs={}, 45 | retries=1, 46 | interval=DEFAULT_INTERVAL, 47 | on_exc=Exception, 48 | ) -> T: 49 | while retries: 50 | retries -= 1 51 | try: 52 | result = await func(*args, **kwargs) 53 | except on_exc: 54 | if not retries: 55 | raise 56 | time.sleep(interval) 57 | else: 58 | return result 59 | 60 | raise NotImplementedError() # pragma: no cover 61 | 62 | 63 | def retry( 64 | func: Callable[..., T], 65 | *, 66 | args=(), 67 | kwargs={}, 68 | retries=1, 69 | interval=DEFAULT_INTERVAL, 70 | on_exc=Exception, 71 | ) -> T: 72 | while retries: 73 | retries -= 1 74 | try: 75 | result = func(*args, **kwargs) 76 | except on_exc: 77 | if not retries: 78 | raise 79 | time.sleep(interval) 80 | else: 81 | return result 82 | 83 | raise NotImplementedError() # pragma: no cover 84 | 85 | 86 | def get_container(pytestconfig, config, *, retries=DEFAULT_RETRIES, interval=DEFAULT_INTERVAL): 87 | multiprocess_safe_mode = use_multiprocess_safe_mode(pytestconfig) 88 | docker = get_docker_client(pytestconfig) 89 | 90 | if responses: 91 | # XXX: moto library may over-mock responses. SEE: https://github.com/spulec/moto/issues/1026 92 | responses.add_passthru("http+docker") 93 | 94 | # The creation of container can fail and leave us in a situation where it's 95 | # we will need to know whether it's been created already or not. 
96 | container = None 97 | 98 | try: 99 | if multiprocess_safe_mode: 100 | from filelock import FileLock 101 | 102 | # get the temp directory shared by all workers (assuming pytest-xdist) 103 | root_tmp_dir = pytestconfig._tmp_path_factory.getbasetemp().parent 104 | fn = root_tmp_dir / f"pmr_create_container_{config.port}.lock" 105 | # wait for the container one process at a time 106 | with FileLock(str(fn)): 107 | container = wait_for_container( 108 | docker, 109 | config, 110 | retries=retries, 111 | interval=interval, 112 | ) 113 | if container: 114 | record_container_creation(pytestconfig, container) 115 | 116 | else: 117 | container = wait_for_container( 118 | docker, 119 | config, 120 | retries=retries, 121 | interval=interval, 122 | ) 123 | 124 | yield config 125 | finally: 126 | cleanup_container = get_pytest_flag(pytestconfig, "pmr_cleanup_container", default=True) 127 | if cleanup_container and container and not multiprocess_safe_mode: 128 | container.kill() 129 | 130 | 131 | def wait_for_container( 132 | docker: DockerClient, config, *, retries=DEFAULT_RETRIES, interval=DEFAULT_INTERVAL 133 | ): 134 | """Wait for evidence that the container is up and healthy. 135 | 136 | The caller must provide a `check_fn` which should `raise ContainerCheckFailed` if 137 | it finds that the container is not yet up. 138 | """ 139 | if config.port is None: 140 | config.set("port", unused_tcp_port()) 141 | 142 | check_fn = config.check_fn 143 | run_args = (config.image,) 144 | run_kwargs = { 145 | "publish": [(dest, source) for source, dest in config.ports().items()], 146 | "envs": config.environment(), 147 | "name": container_name(config.name, config.port), 148 | } 149 | 150 | try: 151 | from python_on_whales.exceptions import DockerException 152 | except ImportError: # pragma: no cover 153 | from python_on_whales.utils import DockerException 154 | 155 | try: 156 | # Perform a single attempt, for the happy-path where the container already exists. 
157 | retry(check_fn, retries=1, interval=interval, on_exc=ContainerCheckFailed) 158 | except ContainerCheckFailed: 159 | # In the event it doesn't exist, we attempt to start the container 160 | try: 161 | container = docker.run(*run_args, **run_kwargs, detach=True, remove=True) 162 | except DockerException as e: 163 | container = None 164 | # This sometimes happens if multiple container fixtures race for the first 165 | # creation of the container, we want to still retry wait in this case. 166 | port_allocated_error = "port is already allocated" 167 | name_allocated_error = "to be able to reuse that name" 168 | 169 | error = str(e) 170 | if port_allocated_error not in error and name_allocated_error not in error: 171 | raise 172 | 173 | # And then we perform more lengthy retry cycle. 174 | retry(check_fn, retries=retries, interval=interval, on_exc=ContainerCheckFailed) 175 | return container 176 | return None 177 | 178 | 179 | def container_name(name: str, port) -> str: 180 | return f"pmr_{name}_{port}" 181 | 182 | 183 | def record_container_creation(pytestconfig, container): 184 | """Record the fact of the creation of a container. 185 | 186 | Record both a local reference to the container in pytest's `config` fixture, 187 | as well as a global PMR lock file of created containers. 
188 | """ 189 | pytestconfig._pmr_containers.append(container) 190 | 191 | fn = get_tmp_root(pytestconfig, parent=True) 192 | with load_container_lockfile(fn) as data: 193 | data.append(container.id) 194 | fn.write_text(json.dumps(data)) 195 | 196 | 197 | def get_tmp_root(pytestconfig, *, parent=False): 198 | """Get the path to the PMR lock file.""" 199 | tmp_path_factory = pytestconfig._tmp_path_factory 200 | 201 | root_tmp_dir = tmp_path_factory.getbasetemp().parent 202 | if parent: 203 | root_tmp_dir = root_tmp_dir.parent 204 | 205 | return root_tmp_dir / "pmr.json" 206 | 207 | 208 | @contextlib.contextmanager 209 | def load_container_lockfile(path: pathlib.Path): 210 | """Produce the contents of the given file behind a file lock.""" 211 | import filelock 212 | 213 | with filelock.FileLock(str(path) + ".lock"): 214 | if path.is_file(): 215 | with open(path, "rb") as f: 216 | yield json.load(f) 217 | else: 218 | yield [] 219 | 220 | 221 | def unused_tcp_port(): 222 | """Find an unused localhost TCP port from 1024-65535 and return it.""" 223 | with contextlib.closing(socket.socket()) as sock: 224 | sock.bind(("127.0.0.1", 0)) 225 | return sock.getsockname()[1] 226 | -------------------------------------------------------------------------------- /tests/fixture/test_engine_manager.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sqlalchemy 3 | from sqlalchemy import Column, Integer, MetaData, SmallInteger, Table, text, Unicode 4 | from sqlalchemy.orm import sessionmaker 5 | 6 | from pytest_mock_resources import create_postgres_fixture, create_sqlite_fixture, Rows 7 | from pytest_mock_resources.compat.sqlalchemy import declarative_base, select 8 | from pytest_mock_resources.sqlalchemy import identify_matching_tables 9 | from tests import skip_if_not_sqlalchemy2, skip_if_sqlalchemy2 10 | 11 | Base = declarative_base() 12 | 13 | 14 | class Thing(Base): 15 | __tablename__ = "thing" 16 | 17 | id = 
Column(Integer, autoincrement=True, primary_key=True) 18 | name = Column(Unicode(5), nullable=False) 19 | 20 | 21 | class Other(Base): 22 | __tablename__ = "other" 23 | __table_args__ = {"schema": "public"} 24 | 25 | id = Column(Integer, autoincrement=True, primary_key=True) 26 | name = Column(Unicode(5), nullable=False) 27 | 28 | 29 | class Other2(Base): 30 | __tablename__ = "other" 31 | __table_args__ = {"schema": "other"} 32 | 33 | id = Column(Integer, autoincrement=True, primary_key=True) 34 | 35 | 36 | sqlite_model = create_sqlite_fixture(Base, tables=[Thing], session=True) 37 | sqlite_table = create_sqlite_fixture(Base, tables=[Thing.__table__], session=True) 38 | sqlite_name_implicit_schema = create_sqlite_fixture(Base, tables=["thing"], session=True) 39 | sqlite_name_explicit_schema = create_sqlite_fixture(Base, tables=["other.other"], session=True) 40 | sqlite_name_bad_table = create_sqlite_fixture(Base, tables=["foo"], session=True) 41 | sqlite_duplicate = create_sqlite_fixture( 42 | Base, tables=[Thing, Other2.__table__, "thing", "other.other"], session=True 43 | ) 44 | sqlite_glob = create_sqlite_fixture(Base, tables=["*.other"], session=True) 45 | 46 | 47 | class TestTablesArg: 48 | def test_model_object(self, sqlite_model): 49 | sqlite_model.execute(text("select * from thing")) 50 | with pytest.raises(sqlalchemy.exc.OperationalError): 51 | sqlite_model.execute(text("select * from other.other")) 52 | 53 | def test_table_object(self, sqlite_table): 54 | sqlite_table.execute(text("select * from thing")) 55 | with pytest.raises(sqlalchemy.exc.OperationalError): 56 | sqlite_table.execute(text("select * from other.other")) 57 | 58 | def test_table_name_implicit_schema(self, sqlite_name_implicit_schema): 59 | sqlite_name_implicit_schema.execute(text("select * from thing")) 60 | with pytest.raises(sqlalchemy.exc.OperationalError): 61 | sqlite_name_implicit_schema.execute(text("select * from other.other")) 62 | 63 | def test_table_name_explicit_schema(self, 
sqlite_name_explicit_schema): 64 | sqlite_name_explicit_schema.execute(text("select * from other.other")) 65 | with pytest.raises(sqlalchemy.exc.OperationalError): 66 | sqlite_name_explicit_schema.execute(text("select * from public.other")) 67 | 68 | @pytest.mark.xfail(strict=True, raises=ValueError) 69 | def test_table_name_bad_name(self, sqlite_name_bad_table): 70 | pass 71 | 72 | def test_table_duplicate(self, sqlite_duplicate): 73 | sqlite_duplicate.execute(text("select * from thing")) 74 | sqlite_duplicate.execute(text("select * from other.other")) 75 | with pytest.raises(sqlalchemy.exc.OperationalError): 76 | sqlite_duplicate.execute(text("select * from public.other")) 77 | 78 | def test_glob(self, sqlite_glob): 79 | with pytest.raises(sqlalchemy.exc.OperationalError): 80 | sqlite_glob.execute(text("select * from thing")) 81 | 82 | sqlite_glob.execute(text("select * from other.other")) 83 | sqlite_glob.execute(text("select * from public.other")) 84 | 85 | 86 | PGBase = declarative_base() 87 | 88 | 89 | class Quarter(PGBase): 90 | __tablename__ = "quarter" 91 | __table_args__ = {"schema": "public"} 92 | 93 | id = Column(Integer, primary_key=True) 94 | year = Column(SmallInteger, nullable=False) 95 | quarter = Column(SmallInteger, nullable=False) 96 | 97 | 98 | class Report(PGBase): 99 | __tablename__ = "report" 100 | 101 | id = Column(Integer, primary_key=True) 102 | 103 | 104 | pg_implicit_schema = create_postgres_fixture(PGBase, tables=["report"]) 105 | pg_explicit_schema = create_postgres_fixture(PGBase, tables=["public.quarter"]) 106 | 107 | 108 | class TestPg: 109 | def test_implicit_schema(self, pg_implicit_schema): 110 | with pg_implicit_schema.begin() as conn: 111 | conn.execute(text("select * from report")) 112 | with pytest.raises(sqlalchemy.exc.ProgrammingError): 113 | conn.execute(text("select * from public.quarter")) 114 | 115 | def test_explicit_schema(self, pg_explicit_schema): 116 | with pg_explicit_schema.begin() as conn: 117 | 
conn.execute(text("select * from quarter")) 118 | with pytest.raises(sqlalchemy.exc.ProgrammingError): 119 | conn.execute(text("select * from report")) 120 | 121 | 122 | rows = Rows(Quarter(id=1, year=1, quarter=1)) 123 | sqlite = create_sqlite_fixture(PGBase, rows, session=True) 124 | sqlite2 = create_sqlite_fixture(PGBase, rows, session=sessionmaker(autocommit=True)) 125 | pg_session = create_postgres_fixture( 126 | PGBase, rows, session=True, tables=["report", "public.quarter"] 127 | ) 128 | 129 | pg_session_async = create_postgres_fixture( 130 | PGBase, rows, session=True, tables=["report", "public.quarter"], async_=True 131 | ) 132 | 133 | 134 | class TestSessionArg: 135 | def test_session(self, sqlite): 136 | result = sqlite.query(Quarter).one() 137 | assert result.id == 1 138 | 139 | sqlite.execute(text("INSERT INTO report (id) VALUES (1)")) 140 | 141 | sqlite.rollback() 142 | result = sqlite.query(Report).all() 143 | assert len(result) == 0 144 | 145 | @skip_if_sqlalchemy2 146 | def test_session2(self, sqlite2): 147 | sqlite2.execute(text("INSERT INTO report (id) VALUES (1)")) 148 | sqlite2.rollback() 149 | result = sqlite2.query(Report).all() 150 | assert len(result) == 1 151 | 152 | def test_session_pg(self, pg_session): 153 | result = pg_session.query(Quarter).one() 154 | assert result.id == 1 155 | 156 | @pytest.mark.asyncio 157 | @skip_if_not_sqlalchemy2 158 | async def test_session_pg_async(self, pg_session_async): 159 | result = (await pg_session_async.execute(select(Quarter))).scalars().one() 160 | assert result.id == 1 161 | 162 | 163 | class Test__identify_matching_tables: 164 | @staticmethod 165 | def by_tablename(table): 166 | return table.schema + table.name 167 | 168 | def setup_metadata(self): 169 | self.metadata = metadata = MetaData() 170 | self.base = declarative_base() 171 | 172 | self.one_foo = Table("foo", metadata, schema="one") 173 | self.one_foo_bar = Table("foo_bar", metadata, schema="one") 174 | self.two_foo = Table("foo", 
metadata, schema="two") 175 | self.two_far = Table("far", metadata, schema="two") 176 | 177 | def test_no_glob(self): 178 | self.setup_metadata() 179 | result = identify_matching_tables(self.metadata, "one.foo") 180 | assert sorted(result, key=self.by_tablename) == [self.one_foo] 181 | 182 | def test_glob_table_on_schema(self): 183 | self.setup_metadata() 184 | result = identify_matching_tables(self.metadata, "one.*") 185 | assert sorted(result, key=self.by_tablename) == [self.one_foo, self.one_foo_bar] 186 | 187 | def test_glob_schema_on_table(self): 188 | self.setup_metadata() 189 | result = identify_matching_tables(self.metadata, "*.foo") 190 | assert sorted(result, key=self.by_tablename) == [self.one_foo, self.two_foo] 191 | 192 | def test_glob_optional_char(self): 193 | self.setup_metadata() 194 | result = identify_matching_tables(self.metadata, "two.f??") 195 | assert sorted(result, key=self.by_tablename) == [self.two_far, self.two_foo] 196 | --------------------------------------------------------------------------------