├── .devcontainer ├── Dockerfile ├── commands │ └── post-create-command.sh └── devcontainer.json ├── .gitattributes ├── .github ├── .release-please-manifest.json ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug-or-unexpected-behavior.md │ ├── feature-proposal.md │ ├── new-container.md │ └── question.md ├── PULL_REQUEST_TEMPLATE │ └── new_container.md ├── actions │ └── setup-env │ │ └── action.yml ├── release-please-config.json ├── settings.yml └── workflows │ ├── ci-community.yml │ ├── ci-core.yml │ ├── ci-lint.yml │ ├── docs.yml │ ├── pr-lint.yml │ └── release-please.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── CHANGELOG.md ├── Dockerfile ├── Dockerfile.diagnostics ├── LICENSE.txt ├── Makefile ├── README.md ├── conf.py ├── core ├── README.rst ├── testcontainers │ ├── compose │ │ ├── __init__.py │ │ └── compose.py │ ├── core │ │ ├── __init__.py │ │ ├── auth.py │ │ ├── config.py │ │ ├── container.py │ │ ├── docker_client.py │ │ ├── exceptions.py │ │ ├── generic.py │ │ ├── image.py │ │ ├── labels.py │ │ ├── network.py │ │ ├── utils.py │ │ ├── version.py │ │ └── waiting_utils.py │ └── socat │ │ ├── __init__.py │ │ └── socat.py └── tests │ ├── compose_fixtures │ ├── basic │ │ └── docker-compose.yaml │ ├── basic_multiple │ │ └── docker-compose.yaml │ ├── basic_volume │ │ └── docker-compose.yaml │ ├── port_multiple │ │ └── compose.yaml │ ├── port_single │ │ └── compose.yaml │ └── profile_support │ │ └── compose.yaml │ ├── conftest.py │ ├── image_fixtures │ ├── busybox │ │ └── Dockerfile │ └── sample │ │ └── Dockerfile │ ├── test_auth.py │ ├── test_compose.py │ ├── test_config.py │ ├── test_container.py │ ├── test_core.py │ ├── test_core_ports.py │ ├── test_core_registry.py │ ├── test_docker_client.py │ ├── test_docker_in_docker.py │ ├── test_image.py │ ├── test_labels.py │ ├── test_network.py │ ├── test_new_docker_api.py │ ├── test_ryuk.py │ ├── test_socat.py │ ├── test_utils.py │ ├── test_version.py │ └── test_waiting_utils.py ├── doctests └── conf.py ├── index.rst ├── modules ├── README.md ├── arangodb │ ├── README.rst │ ├── testcontainers │ │ └── arangodb │ │ │ └── __init__.py │ └── tests │ │ └── test_arangodb.py ├── aws │ ├── README.rst │ ├── testcontainers │ │ └── aws │ │ │ ├── __init__.py │ │ │ └── aws_lambda.py │ └── tests │ │ ├── lambda_sample │ │ ├── Dockerfile │ │ └── lambda_function.py │ │ └── test_aws.py ├── azurite │ ├── README.rst │ ├── testcontainers │ │ └── azurite │ │ │ └── __init__.py │ └── tests │ │ └── test_azurite.py ├── cassandra │ ├── README.rst │ ├── testcontainers │ │ └── cassandra │ │ │ └── __init__.py │ └── tests │ │ └── test_cassandra.py ├── chroma │ ├── README.rst │ ├── testcontainers │ │ └── chroma │ │ │ └── __init__.py │ └── tests │ │ └── test_chroma.py ├── clickhouse │ ├── README.rst │ ├── testcontainers │ │ └── clickhouse │ │ │ └── __init__.py │ └── tests │ │ └── test_clickhouse.py ├── cockroachdb │ ├── README.rst │ ├── testcontainers │ │ └── cockroachdb │ │ │ └── __init__.py │ └── tests │ │ └── test_cockroachdb.py ├── cosmosdb │ ├── README.rst │ ├── testcontainers │ │ └── cosmosdb │ │ │ ├── __init__.py │ │ │ ├── _emulator.py │ │ │ ├── _grab.py │ │ │ ├── mongodb.py │ │ │ └── nosql.py │ └── tests │ │ ├── test_cosmosdb_emulator.py │ │ ├── test_cosmosdb_mongodb.py │ │ └── test_cosmosdb_nosql.py ├── db2 │ ├── README.rst │ ├── testcontainers │ │ └── db2 │ │ │ └── __init__.py │ └── tests │ │ └── test_db2.py ├── elasticsearch │ ├── README.rst │ ├── testcontainers │ │ └── elasticsearch │ │ │ └── __init__.py │ └── tests │ │ └── 
test_elasticsearch.py ├── generic │ ├── README.rst │ ├── testcontainers │ │ └── generic │ │ │ ├── __init__.py │ │ │ └── server.py │ └── tests │ │ ├── conftest.py │ │ ├── samples │ │ ├── advance_1 │ │ │ ├── Dockerfile │ │ │ └── app │ │ │ │ ├── __init__.py │ │ │ │ └── main.py │ │ ├── fastapi │ │ │ ├── Dockerfile │ │ │ └── app │ │ │ │ ├── __init__.py │ │ │ │ └── main.py │ │ └── python_server │ │ │ └── Dockerfile │ │ └── test_generic.py ├── google │ ├── README.rst │ ├── testcontainers │ │ └── google │ │ │ ├── __init__.py │ │ │ ├── datastore.py │ │ │ └── pubsub.py │ └── tests │ │ └── test_google.py ├── index.rst ├── influxdb │ ├── README.rst │ ├── testcontainers │ │ ├── influxdb.py │ │ ├── influxdb1 │ │ │ └── __init__.py │ │ └── influxdb2 │ │ │ └── __init__.py │ └── tests │ │ ├── __init__.py │ │ └── test_influxdb.py ├── k3s │ ├── README.rst │ ├── testcontainers │ │ └── k3s │ │ │ └── __init__.py │ └── tests │ │ └── test_k3s.py ├── kafka │ ├── README.rst │ ├── testcontainers │ │ └── kafka │ │ │ ├── __init__.py │ │ │ └── _redpanda.py │ └── tests │ │ ├── test_kafka.py │ │ └── test_redpanda.py ├── keycloak │ ├── README.rst │ ├── testcontainers │ │ └── keycloak │ │ │ └── __init__.py │ └── tests │ │ └── test_keycloak.py ├── localstack │ ├── README.rst │ ├── testcontainers │ │ └── localstack │ │ │ └── __init__.py │ └── tests │ │ └── test_localstack.py ├── mailpit │ ├── README.rst │ ├── testcontainers │ │ └── mailpit │ │ │ ├── __init__.py │ │ │ └── py.typed │ └── tests │ │ └── test_mailpit.py ├── memcached │ ├── README.rst │ ├── setup.py │ ├── testcontainers │ │ └── memcached │ │ │ └── __init__.py │ └── tests │ │ └── test_memcached.py ├── milvus │ ├── README.rst │ ├── testcontainers │ │ └── milvus │ │ │ └── __init__.py │ └── tests │ │ └── test_milvus.py ├── minio │ ├── README.rst │ ├── testcontainers │ │ └── minio │ │ │ └── __init__.py │ └── tests │ │ └── test_minio.py ├── mongodb │ ├── README.rst │ ├── testcontainers │ │ └── mongodb │ │ │ └── __init__.py │ └── tests │ │ └── test_mongodb.py ├── mqtt │ ├── README.rst │ ├── testcontainers │ │ └── mqtt │ │ │ ├── __init__.py │ │ │ └── testcontainers-mosquitto-default-configuration.conf │ └── tests │ │ └── test_mosquitto.py ├── mssql │ ├── README.rst │ ├── testcontainers │ │ └── mssql │ │ │ └── __init__.py │ └── tests │ │ └── test_mssql.py ├── mysql │ ├── README.rst │ ├── testcontainers │ │ └── mysql │ │ │ └── __init__.py │ └── tests │ │ ├── seeds │ │ ├── 01-schema.sql │ │ └── 02-seeds.sql │ │ └── test_mysql.py ├── nats │ ├── README.rst │ ├── testcontainers │ │ └── nats │ │ │ └── __init__.py │ └── tests │ │ └── test_nats.py ├── neo4j │ ├── README.rst │ ├── testcontainers │ │ └── neo4j │ │ │ └── __init__.py │ └── tests │ │ └── test_neo4j.py ├── nginx │ ├── README.rst │ ├── testcontainers │ │ └── nginx │ │ │ └── __init__.py │ └── tests │ │ └── test_nginx.py ├── ollama │ ├── README.rst │ ├── testcontainers │ │ └── ollama │ │ │ └── __init__.py │ └── tests │ │ └── test_ollama.py ├── opensearch │ ├── README.rst │ ├── testcontainers │ │ └── opensearch │ │ │ └── __init__.py │ └── tests │ │ └── test_opensearch.py ├── oracle-free │ ├── README.rst │ ├── testcontainers │ │ └── oracle │ │ │ └── __init__.py │ └── tests │ │ └── test_oracle.py ├── postgres │ ├── README.rst │ ├── testcontainers │ │ └── postgres │ │ │ └── __init__.py │ └── tests │ │ ├── fixtures │ │ └── postgres_create_example_table.sql │ │ └── test_postgres.py ├── qdrant │ ├── README.rst │ ├── testcontainers │ │ └── qdrant │ │ │ └── __init__.py │ └── tests │ │ ├── test_config.yaml │ │ └── test_qdrant.py ├── 
rabbitmq │ ├── README.rst │ ├── testcontainers │ │ └── rabbitmq │ │ │ └── __init__.py │ └── tests │ │ └── test_rabbitmq.py ├── redis │ ├── README.rst │ ├── testcontainers │ │ └── redis │ │ │ └── __init__.py │ └── tests │ │ └── test_redis.py ├── registry │ ├── README.rst │ ├── testcontainers │ │ └── registry │ │ │ └── __init__.py │ └── tests │ │ └── test_registry.py ├── scylla │ ├── README.rst │ ├── testcontainers │ │ └── scylla │ │ │ └── __init__.py │ └── tests │ │ └── test_scylla.py ├── selenium │ ├── README.rst │ ├── testcontainers │ │ └── selenium │ │ │ ├── __init__.py │ │ │ └── video.py │ └── tests │ │ └── test_selenium.py ├── sftp │ ├── README.rst │ ├── testcontainers │ │ └── sftp │ │ │ ├── __init__.py │ │ │ └── py.typed │ └── tests │ │ └── test_sftp.py ├── test_module_import │ ├── README.rst │ ├── testcontainers │ │ └── test_module_import │ │ │ ├── __init__.py │ │ │ └── new_sub_module.py │ └── tests │ │ └── test_mock_one.py ├── trino │ ├── README.rst │ ├── testcontainers │ │ └── trino │ │ │ └── __init__.py │ └── tests │ │ └── test_trino.py ├── vault │ ├── README.rst │ ├── testcontainers │ │ └── vault │ │ │ └── __init__.py │ └── tests │ │ └── test_vault.py └── weaviate │ ├── README.rst │ ├── testcontainers │ └── weaviate │ │ └── __init__.py │ └── tests │ └── test_weaviate.py ├── poetry.lock ├── pyproject.toml └── scripts ├── diagnostics.py └── mypy_report.py /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/devcontainers/python:1-3.11-bookworm 2 | 3 | 4 | RUN \ 5 | apt update && apt install bash-completion -y && \ 6 | pip install pre-commit && \ 7 | curl -sSL https://install.python-poetry.org | POETRY_HOME=/home/vscode/.local python3 - 8 | 9 | 10 | RUN \ 11 | echo >> /home/vscode/.bashrc && \ 12 | # add completions to bashrc 13 | # see how ubuntu does it for reference: 14 | # https://git.launchpad.net/ubuntu/+source/base-files/tree/share/dot.bashrc 15 | # https://stackoverflow.com/a/68566555 16 | echo 'if [ -f /etc/bash_completion ] && ! shopt -oq posix; then' >> /home/vscode/.bashrc && \ 17 | echo ' . /etc/bash_completion' >> /home/vscode/.bashrc && \ 18 | echo 'fi' >> /home/vscode/.bashrc && \ 19 | echo >> /home/vscode/.bashrc && \ 20 | echo '. <(poetry completions)' >> /home/vscode/.bashrc 21 | -------------------------------------------------------------------------------- /.devcontainer/commands/post-create-command.sh: -------------------------------------------------------------------------------- 1 | echo "Running post-create-command.sh" 2 | 3 | pre-commit install 4 | poetry install --all-extras 5 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the 2 | // README at: https://github.com/devcontainers/templates/tree/main/src/python 3 | { 4 | "name": "Python 3", 5 | // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile 6 | "build": { 7 | // Path is relative to the devcontainer.json file. 
8 | // We prebuild the image to get poetry into the image 9 | // This saves the user a bit of time when re-opening containers 10 | "dockerfile": "Dockerfile" 11 | }, 12 | 13 | "features": { 14 | "ghcr.io/devcontainers/features/docker-in-docker:2": { 15 | "version": "latest", 16 | "dockerDashComposeVersion": "v2" 17 | } 18 | }, 19 | 20 | // Features to add to the dev container. More info: https://containers.dev/features. 21 | // "features": {}, 22 | 23 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 24 | // "forwardPorts": [], 25 | 26 | // Use 'postCreateCommand' to run commands after the container is created. 27 | "postCreateCommand": ".devcontainer/commands/post-create-command.sh", 28 | "customizations": { 29 | "vscode": { 30 | "extensions": [ 31 | "ms-python.python" 32 | ] 33 | } 34 | } 35 | 36 | // Configure tool-specific properties. 37 | // "customizations": {}, 38 | 39 | // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 40 | // "remoteUser": "root" 41 | } 42 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Shell scripts 2 | *.sh text eol=lf 3 | -------------------------------------------------------------------------------- /.github/.release-please-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | ".": "4.10.0" 3 | } 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-or-unexpected-behavior.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug or unexpected behavior 3 | about: Create a report to help us improve. 4 | title: 'Bug: ' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | 12 | A clear and concise description of what the bug is. What did you expect to happen? What happened instead? 13 | 14 | **To Reproduce** 15 | 16 | Provide a self-contained code snippet that illustrates the bug or unexpected behavior. Ideally, open a Pull Request with a failing test that demonstrates the problem. 17 | 18 | ```python 19 | raise RuntimeError("something went wrong") 20 | ``` 21 | 22 | **Runtime environment** 23 | 24 | Provide a summary of your runtime environment. Which operating system, python version, and docker version are you using? What is the version of `testcontainers-python` you are using? You can run the following commands to get the relevant information. 25 | 26 | ```bash 27 | # Get the operating system information (on a Unix OS). 28 | $ uname -a 29 | # Get the python version. 30 | $ python --version 31 | # Get the docker version and other docker information. 32 | $ docker info 33 | # Get all python packages. 34 | $ pip freeze 35 | ``` 36 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-proposal.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Proposal 3 | about: Send a note to the tc-python team about something you would like to see changed or improved. 4 | title: 'Feature: ' 5 | labels: '🚀 enhancement' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **What are you trying to do?** 13 | 14 | Describe the intention of the enhancement. 15 | 16 | **Why should it be done this way?** 17 | 18 | Describe the motivation of the enhancement.
19 | 20 | **Other references:** 21 | 22 | Include any other relevant reading material about the enhancement. 23 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new-container.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: New Container 3 | about: Tell the Testcontainers-Python team about a container you'd like to have support for. 4 | title: 'New Container: ' 5 | labels: '🚀 enhancement' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | **What is the new container you'd like to have?** 13 | 14 | Please link some docker containers as well as documentation/arguments for the benefits of having this container. 15 | 16 | **Why not just use a generic container for this?** 17 | 18 | Please describe why the `DockerContainer("my-image:latest")` approach is not useful enough. 19 | 20 | Having a dedicated `TestContainer` usually means the need for some or all of these: 21 | - complicated setup/configuration 22 | - the wait strategy is complex for the container, usually more than just an HTTP wait 23 | 24 | **Other references:** 25 | 26 | Include any other relevant reading material about the enhancement. 27 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Ask a question about how to use this library. 4 | title: 'Question: ' 5 | labels: '📖 documentation' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | ## What are you trying to do? 13 | 14 | Ask your question here 15 | 16 | ## Where are you trying to do it? 17 | 18 | Provide a self-contained code snippet that illustrates the bug or unexpected behavior. 19 | Ideally, include a link to a public repository with a minimal project where someone from the 20 | testcontainers-python team can submit a PR with a solution to the problem you are facing with the library. 21 | 22 | ## Runtime environment 23 | 24 | Provide a summary of your runtime environment. Which operating system, python version, and docker version are you using? 25 | What is the version of `testcontainers-python` you are using? You can run the following commands to get the relevant information. 26 | 27 | Paste the results of the bash commands below 28 | 29 | ```bash 30 | uname -a 31 | echo "------" 32 | docker info 33 | echo "------" 34 | poetry run python --version 35 | echo "------" 36 | poetry show --tree 37 | ``` 38 | 39 | ```bash 40 | paste-me-here 41 | ``` 42 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/new_container.md: -------------------------------------------------------------------------------- 1 | # New Container 2 | 3 | 4 | 5 | Fixes ... 6 | 7 | 11 | 12 | 13 | # PR Checklist 14 | 15 | - [ ] Your PR title follows the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) syntax 16 | as we make use of this for detecting Semantic Versioning changes. 17 | - Additions to the community modules do not contribute to the SemVer scheme: 18 | all community features will be tagged [community-feat](https://github.com/testcontainers/testcontainers-python/issues?q=label%3Acommunity-feat+), 19 | but we do not want to release minor or major versions due to features or breaking changes outside of core. 20 | So please use `fix(postgres):` or `fix(my_new_vector_db):` if you want to add or modify community modules.
21 | This may change in the future if we have a separate package released with community modules. 22 | - [ ] Your PR allows maintainers to edit your branch; this will speed up resolving minor issues! 23 | - [ ] The new container is implemented under `modules/*` 24 | - Your module follows [PEP 420](https://peps.python.org/pep-0420/) with implicit namespace packages 25 | (if unsure, look at other existing community modules) 26 | - Your package namespacing follows `testcontainers.<module_name>.*` 27 | and you DO NOT have an `__init__.py` above your module's level. 28 | - Your module has its own tests under `modules/*/tests` 29 | - Your module has a `README.rst` and hooks in the `.. auto-class` and `.. title` of your container 30 | - Implement the new feature (typically in `__init__.py`) and corresponding tests. 31 | - [ ] Your module is added in `pyproject.toml` 32 | - it is declared under `tool.poetry.packages` - see other community modules 33 | - it is declared under `tool.poetry.extras` with the same name as your module name; 34 | we still prefer adding _NO EXTRA DEPENDENCIES_, meaning `mymodule = []` is the preferred addition 35 | (see the notes at the bottom) 36 | - [ ] Your branch is up-to-date (or your branch will be rebased with `git rebase`) 37 | 38 | # Preferred implementation 39 | 40 | - The current consensus among maintainers is to try to avoid enforcing the client library 41 | for the given tools you are trying to implement. 42 | - This means we want you to avoid adding specific libraries as dependencies to `testcontainers`. 43 | - Therefore, you should implement the configuration and the waiting with as few extra dependencies as possible 44 | - You may still find it useful to add your preferred client library as a dev dependency 45 | -------------------------------------------------------------------------------- /.github/actions/setup-env/action.yml: -------------------------------------------------------------------------------- 1 | name: setup-env 2 | description: set up the python environment 3 | 4 | inputs: 5 | python-version: 6 | description: "The python version to install and use" 7 | default: "3.12" # we default to latest supported 8 | required: false 9 | 10 | runs: 11 | using: composite 12 | steps: 13 | - name: Setup Poetry 14 | run: pipx install poetry 15 | shell: bash 16 | - name: Setup python ${{ inputs.python-version }} 17 | uses: actions/setup-python@v5 18 | with: 19 | python-version: ${{ inputs.python-version }} 20 | cache: poetry 21 | -------------------------------------------------------------------------------- /.github/release-please-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "release-type": "python", 3 | "packages": { 4 | ".": { 5 | "package-name": "testcontainers" 6 | } 7 | }, 8 | "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json" 9 | } 10 | -------------------------------------------------------------------------------- /.github/workflows/ci-community.yml: -------------------------------------------------------------------------------- 1 | # Continuous Integration for community modules 2 | 3 | name: modules 4 | 5 | on: 6 | push: 7 | branches: [ main ] 8 | paths: 9 | - "modules/**" 10 | pull_request: 11 | branches: [ main ] 12 | paths: 13 | - "modules/**" 14 | 15 | jobs: 16 | track-modules: 17 | runs-on: ubuntu-22.04 18 | steps: 19 | - name: Checkout contents 20 | uses: actions/checkout@v4 21 | with: 22 | fetch-depth: 0 23 | - name: Get changed files 24 | id: changed-files
25 | uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 26 | with: 27 | base: ${{ github.ref }} 28 | list-files: 'json' 29 | filters: | 30 | modules: 31 | - 'modules/**' 32 | - name: Compute modules from files 33 | id: compute-changes 34 | run: | 35 | modules=$(echo "${{ toJson(steps.changed-files.outputs.modules_files) }}" | jq '.[] | split("/") | nth(1)' | jq -s -c '. | unique') 36 | echo "computed_modules=$modules" 37 | echo "computed_modules=$modules" >> $GITHUB_OUTPUT 38 | outputs: 39 | changed_modules: ${{ steps.compute-changes.outputs.computed_modules }} 40 | test: 41 | runs-on: ubuntu-22.04 42 | needs: [track-modules] 43 | if: ${{ needs.track-modules.outputs.changed_modules != '[]' }} 44 | strategy: 45 | fail-fast: false 46 | matrix: 47 | python-version: ["3.9", "3.10", "3.11", "3.12"] 48 | module: ${{ fromJSON(needs.track-modules.outputs.changed_modules) }} 49 | steps: 50 | - name: Checkout contents 51 | uses: actions/checkout@v4 52 | - name: Set up Python 53 | uses: ./.github/actions/setup-env 54 | with: 55 | python-version: ${{ matrix.python-version }} 56 | - name: Install Python dependencies 57 | run: poetry install -E ${{ matrix.module }} 58 | - name: Run tests 59 | run: make modules/${{ matrix.module }}/tests 60 | - name: Run doctests 61 | run: make modules/${{ matrix.module }}/doctests 62 | -------------------------------------------------------------------------------- /.github/workflows/ci-core.yml: -------------------------------------------------------------------------------- 1 | # Continuous Integration for the core package 2 | 3 | name: core 4 | 5 | on: 6 | push: 7 | branches: [main] 8 | pull_request: 9 | branches: [main] 10 | 11 | jobs: 12 | run-tests-and-coverage: 13 | runs-on: ubuntu-22.04 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | python-version: ["3.9", "3.10", "3.11", "3.12"] 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python 21 | uses: ./.github/actions/setup-env 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | - name: Install Python dependencies 25 | run: poetry install --all-extras 26 | - name: Run twine check 27 | run: poetry build && poetry run twine check dist/*.tar.gz 28 | - name: Set up Docker 29 | uses: docker/setup-docker-action@v4 30 | - name: Run tests 31 | run: make core/tests 32 | - name: Rename coverage file 33 | run: mv .coverage .coverage.${{ matrix.python-version}} 34 | - name: "Save coverage artifact" 35 | uses: actions/upload-artifact@v4 36 | with: 37 | name: "coverage-artifact-${{ matrix.python-version}}" 38 | include-hidden-files: true 39 | path: ".coverage.*" 40 | retention-days: 1 41 | - name: Run doctests 42 | run: make core/doctests 43 | 44 | coverage-compile: 45 | needs: "run-tests-and-coverage" 46 | runs-on: ubuntu-22.04 47 | steps: 48 | - uses: actions/checkout@v4 49 | - name: Set up Python 50 | uses: ./.github/actions/setup-env 51 | - name: Install Python dependencies 52 | run: poetry install --all-extras 53 | - name: "Download coverage artifacts" 54 | uses: actions/download-artifact@v4 55 | with: 56 | pattern: "coverage-artifact-*" 57 | merge-multiple: true 58 | - name: Compile coverage 59 | run: make coverage 60 | - name: Upload coverage to Codecov 61 | uses: codecov/codecov-action@v4 62 | with: 63 | token: ${{ secrets.CODECOV_TOKEN }} 64 | -------------------------------------------------------------------------------- /.github/workflows/ci-lint.yml: -------------------------------------------------------------------------------- 1 | # Contrinuous Integration 
for the core package 2 | 3 | name: lint 4 | 5 | on: 6 | push: 7 | branches: [main] 8 | pull_request: 9 | branches: [main] 10 | 11 | jobs: 12 | python: 13 | runs-on: ubuntu-22.04 14 | steps: 15 | - uses: actions/checkout@v4 16 | - name: Setup Env 17 | uses: ./.github/actions/setup-env 18 | with: 19 | python-version: "3.9" # the pre-commit is hooked in as 3.9 20 | - name: Install Python dependencies 21 | run: poetry install --no-interaction 22 | - name: Execute pre-commit handler 23 | run: | 24 | poetry run pre-commit run check-toml 25 | poetry run pre-commit run trailing-whitespace 26 | poetry run pre-commit run end-of-file-fixer 27 | poetry run pre-commit run ruff 28 | poetry run pre-commit run ruff-format 29 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: docs 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-22.04 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Set up Python 15 | uses: ./.github/actions/setup-env 16 | with: 17 | python-version: "3.11" 18 | - name: Install Python dependencies 19 | run: poetry install --all-extras 20 | - name: Build documentation 21 | run: make docs 22 | -------------------------------------------------------------------------------- /.github/workflows/pr-lint.yml: -------------------------------------------------------------------------------- 1 | name: lint-pr 2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | permissions: 11 | pull-requests: read 12 | 13 | jobs: 14 | validate: 15 | name: validate-pull-request-title 16 | runs-on: ubuntu-22.04 17 | steps: 18 | - name: validate pull request title 19 | uses: kontrolplane/pull-request-title-validator@ab2b54babb5337246f4b55cf8e0a1ecb0575e46d #v1 20 | -------------------------------------------------------------------------------- /.github/workflows/release-please.yml: -------------------------------------------------------------------------------- 1 | name: Release Please 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | 7 | jobs: 8 | release: 9 | runs-on: ubuntu-22.04 10 | outputs: 11 | release_created: ${{ steps.track-release.outputs.release_created }} 12 | steps: 13 | - uses: google-github-actions/release-please-action@v4 14 | id: track-release 15 | with: 16 | manifest-file: .github/.release-please-manifest.json 17 | config-file: .github/release-please-config.json 18 | publish: 19 | runs-on: ubuntu-22.04 20 | environment: release 21 | permissions: 22 | id-token: write 23 | needs: 24 | - release 25 | if: ${{ needs.release.outputs.release_created }} 26 | steps: 27 | - uses: actions/checkout@v4 28 | - name: Set up Python 29 | uses: ./.github/actions/setup-env 30 | - name: build package 31 | run: poetry build 32 | # this action uploads packages from the `dist/` directory, which poetry has built in the previous step 33 | # usable once we set up trusted publishing, see https://docs.pypi.org/trusted-publishers/using-a-publisher/ 34 | - name: push package 35 | uses: pypa/gh-action-pypi-publish@release/v1 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 
10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | *.iml 56 | 57 | *COMMIT_MSG 58 | 59 | # QuickBuild 60 | .qbcache/ 61 | 62 | .noseids 63 | .idea/ 64 | .venv/ 65 | venv 66 | .testrepository/ 67 | 68 | # vscode: 69 | .vscode/ 70 | 71 | .DS_Store 72 | .python-version 73 | .env 74 | .github-token 75 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3 3 | 4 | repos: 5 | - repo: https://github.com/pre-commit/pre-commit-hooks 6 | rev: 'v4.5.0' 7 | hooks: 8 | - id: check-toml 9 | - id: trailing-whitespace 10 | - id: end-of-file-fixer 11 | 12 | - repo: https://github.com/astral-sh/ruff-pre-commit 13 | rev: 'v0.11.5' 14 | hooks: 15 | - id: ruff 16 | # Explicitly setting config to prevent Ruff from using `pyproject.toml` in sub packages. 17 | args: [ '--fix', '--exit-non-zero-on-fix', '--config', 'pyproject.toml' ] 18 | - id: ruff-format 19 | args: [ '--config', 'pyproject.toml' ] 20 | 21 | - repo: local 22 | hooks: 23 | - id: mypy 24 | name: mypy 25 | entry: poetry run mypy 26 | args: ["--config-file", "pyproject.toml"] 27 | files: "core" # start with the core being type checked 28 | language: system 29 | types: [ python ] 30 | require_serial: true 31 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file (see https://docs.readthedocs.io/en/stable/config-file/v2.html 2 | # for details). 
3 | 4 | version: 2 5 | 6 | sphinx: 7 | configuration: conf.py 8 | 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.10" 13 | 14 | # https://github.com/readthedocs/readthedocs.org/issues/4912#issuecomment-1143587902s 15 | jobs: 16 | post_install: 17 | - pip install poetry==2.1.2 # match version from poetry.lock 18 | - poetry config virtualenvs.create false 19 | - poetry install --all-extras 20 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION=3.10 2 | FROM python:${PYTHON_VERSION}-slim-bookworm 3 | 4 | ENV POETRY_NO_INTERACTION=1 \ 5 | POETRY_VIRTUALENVS_IN_PROJECT=1 \ 6 | POETRY_VIRTUALENVS_CREATE=1 \ 7 | POETRY_CACHE_DIR=/tmp/poetry_cache 8 | 9 | WORKDIR /workspace 10 | RUN pip install --upgrade pip \ 11 | && apt-get update \ 12 | && apt-get install -y freetds-dev \ 13 | && apt-get install -y make \ 14 | # no real need for keeping this image small at the moment 15 | && :; # rm -rf /var/lib/apt/lists/* 16 | 17 | # install poetry 18 | RUN bash -c 'python -m venv /opt/poetry-venv && source $_/bin/activate && pip install poetry && ln -s $(which poetry) /usr/bin' 19 | 20 | # install dependencies with poetry 21 | COPY pyproject.toml . 22 | COPY poetry.lock . 23 | RUN poetry install --all-extras --with dev --no-root 24 | 25 | # copy project source 26 | COPY . . 27 | 28 | # install project with poetry 29 | RUN poetry install --all-extras --with dev 30 | -------------------------------------------------------------------------------- /Dockerfile.diagnostics: -------------------------------------------------------------------------------- 1 | ARG version=3.8 2 | FROM python:${version} 3 | 4 | WORKDIR /workspace 5 | COPY core core 6 | RUN pip install --no-cache-dir -e core 7 | COPY scripts/diagnostics.py . 8 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .DEFAULT_GOAL := help 2 | 3 | 4 | PYTHON_VERSION ?= 3.10 5 | IMAGE = testcontainers-python:${PYTHON_VERSION} 6 | PACKAGES = core $(addprefix modules/,$(notdir $(wildcard modules/*))) 7 | 8 | UPLOAD = $(addsuffix /upload,${PACKAGES}) 9 | TESTS = $(addsuffix /tests,$(filter-out meta,${PACKAGES})) 10 | TESTS_DIND = $(addsuffix -dind,${TESTS}) 11 | DOCTESTS = $(addsuffix /doctests,$(filter-out modules/README.md,${PACKAGES})) 12 | 13 | 14 | install: ## Set up the project for development 15 | poetry install --all-extras 16 | poetry run pre-commit install 17 | 18 | build: ## Build the python package 19 | poetry build && poetry run twine check dist/* 20 | 21 | tests: ${TESTS} ## Run tests for each package 22 | ${TESTS}: %/tests: 23 | poetry run pytest -v --cov=testcontainers.$* $*/tests 24 | 25 | coverage: ## Target to combine and report coverage. 26 | poetry run coverage combine 27 | poetry run coverage report 28 | poetry run coverage xml 29 | poetry run coverage html 30 | 31 | lint: ## Lint all files in the project, which we also run in pre-commit 32 | poetry run pre-commit run -a 33 | 34 | mypy-core-report: 35 | poetry run mypy --config-file pyproject.toml core | poetry run python scripts/mypy_report.py 36 | 37 | docs: ## Build the docs for the project 38 | poetry run sphinx-build -nW . docs/_build 39 | 40 | # Target to build docs watching for changes as per https://stackoverflow.com/a/21389615 41 | docs-watch : 42 | poetry run sphinx-autobuild . 
docs/_build # requires 'pip install sphinx-autobuild' 43 | 44 | doctests: ${DOCTESTS} ## Run doctests found across the documentation. 45 | poetry run sphinx-build -b doctest . docs/_build 46 | 47 | ${DOCTESTS}: %/doctests: ## Run doctests found for a module. 48 | poetry run sphinx-build -b doctest -c doctests $* docs/_build 49 | 50 | 51 | clean: ## Remove generated files. 52 | rm -rf docs/_build 53 | rm -rf build 54 | rm -rf dist 55 | rm -rf */*.egg-info 56 | 57 | clean-all: clean ## Remove all generated files and reset the local virtual environment 58 | rm -rf .venv 59 | 60 | # Targets that do not generate file-level artifacts. 61 | .PHONY: clean docs doctests image tests ${TESTS} 62 | 63 | 64 | # Implements this pattern for autodocumenting Makefiles: 65 | # https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html 66 | # 67 | # Picks up all comments that start with a ## and are at the end of a target definition line. 68 | .PHONY: help 69 | help: ## Display command usage 70 | @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 71 | -------------------------------------------------------------------------------- /core/README.rst: -------------------------------------------------------------------------------- 1 | Testcontainers Core 2 | =================== 3 | 4 | :code:`testcontainers-core` is the core functionality for spinning up Docker containers in test environments. 5 | 6 | .. autoclass:: testcontainers.core.container.DockerContainer 7 | :members: with_bind_ports, with_exposed_ports 8 | 9 | .. note:: 10 | When using `with_bind_ports` or `with_exposed_ports` 11 | you can specify the port in the following formats: :code:`{private_port}/{protocol}` 12 | 13 | e.g. `8080/tcp` or `8125/udp` or just `8080` (default protocol is tcp) 14 | 15 | For legacy reasons, the port can be an *integer* 16 | 17 | .. autoclass:: testcontainers.core.image.DockerImage 18 | 19 | .. autoclass:: testcontainers.core.generic.DbContainer 20 | 21 | .. autoclass:: testcontainers.core.network.Network 22 | 23 | .. raw:: html 24 | 25 |
26 | 27 | Examples 28 | -------- 29 | 30 | Using `DockerContainer` and `DockerImage` to create a container: 31 | 32 | .. doctest:: 33 | 34 | >>> from testcontainers.core.container import DockerContainer 35 | >>> from testcontainers.core.waiting_utils import wait_for_logs 36 | >>> from testcontainers.core.image import DockerImage 37 | 38 | >>> with DockerImage(path="./core/tests/image_fixtures/sample/", tag="test-sample:latest") as image: 39 | ... with DockerContainer(str(image)) as container: 40 | ... delay = wait_for_logs(container, "Test Sample Image") 41 | 42 | The `DockerImage` class is used to build the image from the specified path and tag. 43 | The `DockerContainer` class is then used to create a container from the image. 44 | -------------------------------------------------------------------------------- /core/testcontainers/compose/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa: F401 2 | from testcontainers.compose.compose import ( 3 | ComposeContainer, 4 | ContainerIsNotRunning, 5 | DockerCompose, 6 | NoSuchPortExposed, 7 | PublishedPort, 8 | ) 9 | -------------------------------------------------------------------------------- /core/testcontainers/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testcontainers/testcontainers-python/5f34ad0e934a83b49c14b5b0d63284448eac1940/core/testcontainers/core/__init__.py -------------------------------------------------------------------------------- /core/testcontainers/core/auth.py: -------------------------------------------------------------------------------- 1 | import base64 as base64 2 | import json as json 3 | from collections import namedtuple 4 | from logging import warning 5 | from typing import Any, Optional 6 | 7 | DockerAuthInfo = namedtuple("DockerAuthInfo", ["registry", "username", "password"]) 8 | 9 | _AUTH_WARNINGS = { 10 | "credHelpers": "DOCKER_AUTH_CONFIG is experimental, credHelpers not supported yet", 11 | "credsStore": "DOCKER_AUTH_CONFIG is experimental, credsStore not supported yet", 12 | } 13 | 14 | 15 | def process_docker_auth_config_encoded(auth_config_dict: dict[str, dict[str, dict[str, Any]]]) -> list[DockerAuthInfo]: 16 | """ 17 | Process the auths config. 18 | 19 | Example: 20 | { 21 | "auths": { 22 | "https://index.docker.io/v1/": { 23 | "auth": "dXNlcm5hbWU6cGFzc3dvcmQ=" 24 | } 25 | } 26 | } 27 | 28 | Returns a list of DockerAuthInfo objects. 29 | """ 30 | auth_info: list[DockerAuthInfo] = [] 31 | 32 | auths = auth_config_dict.get("auths") 33 | if not auths: 34 | raise KeyError("No auths found in the docker auth config") 35 | 36 | for registry, auth in auths.items(): 37 | auth_str = str(auth.get("auth")) 38 | auth_str = base64.b64decode(auth_str).decode("utf-8") 39 | username, password = auth_str.split(":") 40 | auth_info.append(DockerAuthInfo(registry, username, password)) 41 | 42 | return auth_info 43 | 44 | 45 | def process_docker_auth_config_cred_helpers(auth_config_dict: dict[str, Any]) -> None: 46 | """ 47 | Process the credHelpers config. 48 | 49 | Example: 50 | { 51 | "credHelpers": { 52 | ".dkr.ecr..amazonaws.com": "ecr-login" 53 | } 54 | } 55 | 56 | This is not supported yet. 57 | """ 58 | if "credHelpers" in _AUTH_WARNINGS: 59 | warning(_AUTH_WARNINGS.pop("credHelpers")) 60 | 61 | 62 | def process_docker_auth_config_store(auth_config_dict: dict[str, Any]) -> None: 63 | """ 64 | Process the credsStore config. 
65 | 66 | Example: 67 | { 68 | "credsStore": "ecr-login" 69 | } 70 | 71 | This is not supported yet. 72 | """ 73 | if "credsStore" in _AUTH_WARNINGS: 74 | warning(_AUTH_WARNINGS.pop("credsStore")) 75 | 76 | 77 | def parse_docker_auth_config(auth_config: str) -> Optional[list[DockerAuthInfo]]: 78 | """Parse the docker auth config from a string and handle the different formats.""" 79 | try: 80 | auth_config_dict: dict[str, Any] = json.loads(auth_config) 81 | if "credHelpers" in auth_config: 82 | process_docker_auth_config_cred_helpers(auth_config_dict) 83 | if "credsStore" in auth_config: 84 | process_docker_auth_config_store(auth_config_dict) 85 | if "auths" in auth_config: 86 | return process_docker_auth_config_encoded(auth_config_dict) 87 | 88 | except (json.JSONDecodeError, KeyError, ValueError) as exp: 89 | raise ValueError("Could not parse docker auth config") from exp 90 | 91 | return None 92 | -------------------------------------------------------------------------------- /core/testcontainers/core/exceptions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | 14 | 15 | class ContainerStartException(RuntimeError): 16 | pass 17 | 18 | 19 | class ContainerConnectException(RuntimeError): 20 | pass 21 | 22 | 23 | class ContainerIsNotRunning(RuntimeError): 24 | pass 25 | 26 | 27 | class NoSuchPortExposed(RuntimeError): 28 | pass 29 | -------------------------------------------------------------------------------- /core/testcontainers/core/generic.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | from typing import Any, Optional 14 | from urllib.parse import quote 15 | 16 | from testcontainers.core.container import DockerContainer 17 | from testcontainers.core.exceptions import ContainerStartException 18 | from testcontainers.core.utils import raise_for_deprecated_parameter 19 | from testcontainers.core.waiting_utils import wait_container_is_ready 20 | 21 | ADDITIONAL_TRANSIENT_ERRORS = [] 22 | try: 23 | from sqlalchemy.exc import DBAPIError 24 | 25 | ADDITIONAL_TRANSIENT_ERRORS.append(DBAPIError) 26 | except ImportError: 27 | pass 28 | 29 | 30 | class DbContainer(DockerContainer): 31 | """ 32 | **DEPRECATED (for removal)** 33 | 34 | Generic database container. 
35 | """ 36 | 37 | @wait_container_is_ready(*ADDITIONAL_TRANSIENT_ERRORS) 38 | def _connect(self) -> None: 39 | import sqlalchemy 40 | 41 | engine = sqlalchemy.create_engine(self.get_connection_url()) 42 | try: 43 | engine.connect() 44 | finally: 45 | engine.dispose() 46 | 47 | def get_connection_url(self) -> str: 48 | raise NotImplementedError 49 | 50 | def _create_connection_url( 51 | self, 52 | dialect: str, 53 | username: str, 54 | password: str, 55 | host: Optional[str] = None, 56 | port: Optional[int] = None, 57 | dbname: Optional[str] = None, 58 | **kwargs: Any, 59 | ) -> str: 60 | if raise_for_deprecated_parameter(kwargs, "db_name", "dbname"): 61 | raise ValueError(f"Unexpected arguments: {','.join(kwargs)}") 62 | if self._container is None: 63 | raise ContainerStartException("container has not been started") 64 | host = host or self.get_container_host_ip() 65 | port = self.get_exposed_port(port) 66 | quoted_password = quote(password, safe=" +") 67 | url = f"{dialect}://{username}:{quoted_password}@{host}:{port}" 68 | if dbname: 69 | url = f"{url}/{dbname}" 70 | return url 71 | 72 | def start(self) -> "DbContainer": 73 | self._configure() 74 | super().start() 75 | self._transfer_seed() 76 | self._connect() 77 | return self 78 | 79 | def _configure(self) -> None: 80 | raise NotImplementedError 81 | 82 | def _transfer_seed(self) -> None: 83 | pass 84 | -------------------------------------------------------------------------------- /core/testcontainers/core/labels.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | from typing import Optional 3 | from uuid import uuid4 4 | 5 | from testcontainers.core.config import testcontainers_config as c 6 | 7 | SESSION_ID: str = str(uuid4()) 8 | TESTCONTAINERS_NAMESPACE = "org.testcontainers" 9 | 10 | LABEL_TESTCONTAINERS = TESTCONTAINERS_NAMESPACE 11 | LABEL_SESSION_ID = "org.testcontainers.session-id" 12 | LABEL_VERSION = "org.testcontainers.version" 13 | LABEL_LANG = "org.testcontainers.lang" 14 | 15 | 16 | def create_labels(image: str, labels: Optional[dict[str, str]]) -> dict[str, str]: 17 | if labels is None: 18 | labels = {} 19 | else: 20 | for k in labels: 21 | if k.startswith(TESTCONTAINERS_NAMESPACE): 22 | raise ValueError("The org.testcontainers namespace is reserved for internal use") 23 | 24 | tc_labels = { 25 | **labels, 26 | LABEL_LANG: "python", 27 | LABEL_TESTCONTAINERS: "true", 28 | LABEL_VERSION: importlib.metadata.version("testcontainers"), 29 | } 30 | 31 | if image == c.ryuk_image: 32 | return tc_labels 33 | 34 | tc_labels[LABEL_SESSION_ID] = SESSION_ID 35 | return tc_labels 36 | -------------------------------------------------------------------------------- /core/testcontainers/core/network.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 
13 | import uuid 14 | from typing import Any, Optional 15 | 16 | from testcontainers.core.docker_client import DockerClient 17 | 18 | 19 | class Network: 20 | """ 21 | Network context manager for programmatically connecting containers. 22 | """ 23 | 24 | def __init__( 25 | self, docker_client_kw: Optional[dict[str, Any]] = None, docker_network_kw: Optional[dict[str, Any]] = None 26 | ): 27 | self.name = str(uuid.uuid4()) 28 | self._docker = DockerClient(**(docker_client_kw or {})) 29 | self._docker_network_kw = docker_network_kw or {} 30 | 31 | def connect(self, container_id: str, network_aliases: Optional[list[str]] = None) -> None: 32 | self._network.connect(container_id, aliases=network_aliases) 33 | 34 | def remove(self) -> None: 35 | self._network.remove() 36 | 37 | def create(self) -> "Network": 38 | self._network = self._docker.client_networks_create(self.name, self._docker_network_kw) 39 | self.id = self._network.id 40 | return self 41 | 42 | def __enter__(self) -> "Network": 43 | return self.create() 44 | 45 | def __exit__(self, exc_type, exc_val, exc_tb) -> None: # type: ignore[no-untyped-def] 46 | self.remove() 47 | -------------------------------------------------------------------------------- /core/testcontainers/core/utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import platform 4 | import subprocess 5 | import sys 6 | from pathlib import Path 7 | from typing import Any, Final, Optional 8 | 9 | LINUX = "linux" 10 | MAC = "mac" 11 | WIN = "win" 12 | 13 | 14 | def setup_logger(name: str) -> logging.Logger: 15 | logger = logging.getLogger(name) 16 | logger.setLevel(logging.INFO) 17 | handler = logging.StreamHandler() 18 | handler.setLevel(logging.INFO) 19 | logger.addHandler(handler) 20 | return logger 21 | 22 | 23 | def os_name() -> Optional[str]: 24 | pl = sys.platform 25 | if pl == "linux" or pl == "linux2": 26 | return LINUX 27 | elif pl == "darwin": 28 | return MAC 29 | elif pl == "win32": 30 | return WIN 31 | return None 32 | 33 | 34 | def is_mac() -> bool: 35 | return os_name() == MAC 36 | 37 | 38 | def is_linux() -> bool: 39 | return os_name() == LINUX 40 | 41 | 42 | def is_windows() -> bool: 43 | return os_name() == WIN 44 | 45 | 46 | def is_arm() -> bool: 47 | return platform.machine() in ("arm64", "aarch64") 48 | 49 | 50 | def inside_container() -> bool: 51 | """ 52 | Returns true if we are running inside a container. 
53 | 54 | https://github.com/docker/docker/blob/a9fa38b1edf30b23cae3eade0be48b3d4b1de14b/daemon/initlayer/setup_unix.go#L25 55 | """ 56 | return os.path.exists("/.dockerenv") 57 | 58 | 59 | def default_gateway_ip() -> Optional[str]: 60 | """ 61 | Returns gateway IP address of the host that testcontainer process is 62 | running on 63 | 64 | https://github.com/testcontainers/testcontainers-java/blob/3ad8d80e2484864e554744a4800a81f6b7982168/core/src/main/java/org/testcontainers/dockerclient/DockerClientConfigUtils.java#L27 65 | """ 66 | cmd = ["sh", "-c", "ip route|awk '/default/ { print $3 }'"] 67 | try: 68 | process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 69 | ip_address = process.communicate()[0] 70 | if ip_address and process.returncode == 0: 71 | return ip_address.decode("utf-8").strip().strip("\n") 72 | return None 73 | except subprocess.SubprocessError: 74 | return None 75 | 76 | 77 | def raise_for_deprecated_parameter(kwargs: dict[Any, Any], name: str, replacement: str) -> dict[Any, Any]: 78 | """ 79 | Raise an error if a dictionary of keyword arguments contains a key and suggest the replacement. 80 | """ 81 | if kwargs.pop(name, None): 82 | raise ValueError(f"Use `{replacement}` instead of `{name}`") 83 | return kwargs 84 | 85 | 86 | CGROUP_FILE: Final[Path] = Path("/proc/self/cgroup") 87 | 88 | 89 | def get_running_in_container_id() -> Optional[str]: 90 | """ 91 | Get the id of the currently running container 92 | """ 93 | if not CGROUP_FILE.is_file(): 94 | return None 95 | cgroup = CGROUP_FILE.read_text() 96 | for line in cgroup.splitlines(keepends=False): 97 | path = line.rpartition(":")[2] 98 | if path.startswith("/docker"): 99 | return path.removeprefix("/docker/") 100 | return None 101 | -------------------------------------------------------------------------------- /core/testcontainers/core/version.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | from packaging.version import Version 4 | 5 | 6 | class ComparableVersion: 7 | """A wrapper around packaging.version.Version that allows for comparison with strings""" 8 | 9 | def __init__(self, version: str) -> None: 10 | self.version = Version(version) 11 | 12 | def __lt__(self, other: object) -> bool: 13 | return self._apply_op(other, lambda x, y: x < y) 14 | 15 | def __le__(self, other: object) -> bool: 16 | return self._apply_op(other, lambda x, y: x <= y) 17 | 18 | def __eq__(self, other: object) -> bool: 19 | return self._apply_op(other, lambda x, y: x == y) 20 | 21 | def __ne__(self, other: object) -> bool: 22 | return self._apply_op(other, lambda x, y: x != y) 23 | 24 | def __gt__(self, other: object) -> bool: 25 | return self._apply_op(other, lambda x, y: x > y) 26 | 27 | def __ge__(self, other: object) -> bool: 28 | return self._apply_op(other, lambda x, y: x >= y) 29 | 30 | def _apply_op(self, other: object, op: Callable[[Version, Version], bool]) -> bool: 31 | other = Version(str(other)) 32 | return op(self.version, other) 33 | -------------------------------------------------------------------------------- /core/testcontainers/socat/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa: F401 2 | from testcontainers.socat.socat import SocatContainer 3 | -------------------------------------------------------------------------------- /core/tests/compose_fixtures/basic/docker-compose.yaml: 
-------------------------------------------------------------------------------- 1 | services: 2 | alpine: 3 | image: alpine:latest 4 | init: true 5 | command: 6 | - sh 7 | - -c 8 | - 'while true; do sleep 0.1 ; date -Ins; done' 9 | -------------------------------------------------------------------------------- /core/tests/compose_fixtures/basic_multiple/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | alpine1: 3 | image: alpine:latest 4 | init: true 5 | command: 6 | - sh 7 | - -c 8 | - 'while true; do sleep 0.1 ; date -Ins; done' 9 | alpine2: 10 | image: alpine:latest 11 | init: true 12 | command: 13 | - sh 14 | - -c 15 | - 'while true; do sleep 0.1 ; date -Ins; done' 16 | -------------------------------------------------------------------------------- /core/tests/compose_fixtures/basic_volume/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | alpine: 3 | image: alpine:latest 4 | init: true 5 | command: 6 | - sh 7 | - -c 8 | - 'while true; do sleep 0.1 ; date -Ins; done' 9 | read_only: true 10 | volumes: 11 | - type: volume 12 | source: my-data 13 | target: /var/lib/example/data 14 | read_only: false 15 | 16 | volumes: 17 | my-data: {} 18 | -------------------------------------------------------------------------------- /core/tests/compose_fixtures/port_multiple/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | alpine: 3 | image: nginx:alpine-slim 4 | init: true 5 | ports: 6 | - '81' 7 | - '82' 8 | - target: 80 9 | published: "5000-5999" 10 | host_ip: 127.0.0.1 11 | protocol: tcp 12 | command: 13 | - sh 14 | - -c 15 | - 'd=/etc/nginx/conf.d; echo "server { listen 81; location / { return 202; } }" > $$d/81.conf && echo "server { listen 82; location / { return 204; } }" > $$d/82.conf && nginx -g "daemon off;"' 16 | 17 | alpine2: 18 | image: nginx:alpine-slim 19 | init: true 20 | ports: 21 | - target: 80 22 | published: "5000-5999" 23 | host_ip: 127.0.0.1 24 | protocol: tcp 25 | command: 26 | - sh 27 | - -c 28 | - 'd=/etc/nginx/conf.d; echo "server { listen 81; location / { return 202; } }" > $$d/81.conf && echo "server { listen 82; location / { return 204; } }" > $$d/82.conf && nginx -g "daemon off;"' 29 | -------------------------------------------------------------------------------- /core/tests/compose_fixtures/port_single/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | alpine: 3 | image: nginx:alpine-slim 4 | init: true 5 | ports: 6 | - target: 80 7 | host_ip: 127.0.0.1 8 | protocol: tcp 9 | command: 10 | - sh 11 | - -c 12 | - 'nginx -g "daemon off;"' 13 | -------------------------------------------------------------------------------- /core/tests/compose_fixtures/profile_support/compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | runs-always: &simple-service 3 | image: alpine:latest 4 | init: true 5 | command: 6 | - sh 7 | - -c 8 | - 'while true; do sleep 0.1 ; date -Ins; done' 9 | runs-profile-a: 10 | <<: *simple-service 11 | profiles: 12 | - profile-a 13 | runs-profile-b: 14 | <<: *simple-service 15 | profiles: 16 | - profile-b 17 | -------------------------------------------------------------------------------- /core/tests/conftest.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import 
pytest 4 | from typing import Callable 5 | from testcontainers.core.container import DockerClient 6 | from pprint import pprint 7 | import sys 8 | 9 | PROJECT_DIR = Path(__file__).parent.parent.parent.resolve() 10 | 11 | 12 | def pytest_configure(config: pytest.Config) -> None: 13 | """ 14 | Add configuration for custom pytest markers. 15 | """ 16 | config.addinivalue_line( 17 | "markers", 18 | "inside_docker_check: test used to validate DinD/DooD are working as expected", 19 | ) 20 | 21 | 22 | @pytest.fixture(scope="session") 23 | def python_testcontainer_image() -> str: 24 | """Build an image with test containers python for DinD and DooD tests""" 25 | py_version = ".".join(map(str, sys.version_info[:2])) 26 | image_name = f"testcontainers-python:{py_version}" 27 | client = DockerClient() 28 | client.build( 29 | path=str(PROJECT_DIR), 30 | tag=image_name, 31 | rm=False, 32 | buildargs={"PYTHON_VERSION": py_version}, 33 | ) 34 | return image_name 35 | 36 | 37 | @pytest.fixture 38 | def check_for_image() -> Callable[[str, bool], None]: 39 | """Warp the check_for_image function in a fixture""" 40 | 41 | def _check_for_image(image_short_id: str, cleaned: bool) -> None: 42 | """ 43 | Validates if the image is present or not. 44 | 45 | :param image_short_id: The short id of the image 46 | :param cleaned: True if the image should not be present, False otherwise 47 | """ 48 | client = DockerClient() 49 | images = client.client.images.list() 50 | found = any(image.short_id.endswith(image_short_id) for image in images) 51 | assert found is not cleaned, f"Image {image_short_id} was {'found' if cleaned else 'not found'}" 52 | 53 | return _check_for_image 54 | 55 | 56 | @pytest.fixture 57 | def show_container_attributes() -> None: 58 | """Wrap the show_container_attributes function in a fixture""" 59 | 60 | def _show_container_attributes(container_id: str) -> None: 61 | """Print the attributes of a container""" 62 | client = DockerClient().client 63 | data = client.containers.get(container_id).attrs 64 | pprint(data) 65 | 66 | return _show_container_attributes 67 | -------------------------------------------------------------------------------- /core/tests/image_fixtures/busybox/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM busybox:buildroot-2014.02 2 | VOLUME /data 3 | CMD ["/bin/sh"] 4 | -------------------------------------------------------------------------------- /core/tests/image_fixtures/sample/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:latest 2 | CMD echo "Test Sample Image" 3 | -------------------------------------------------------------------------------- /core/tests/test_auth.py: -------------------------------------------------------------------------------- 1 | import json 2 | import pytest 3 | 4 | from testcontainers.core.auth import parse_docker_auth_config, DockerAuthInfo 5 | 6 | 7 | def test_parse_docker_auth_config_encoded(): 8 | auth_config_json = '{"auths":{"https://index.docker.io/v1/":{"auth":"dXNlcm5hbWU6cGFzc3dvcmQ="}}}' 9 | auth_info = parse_docker_auth_config(auth_config_json) 10 | assert auth_info 11 | assert len(auth_info) == 1 12 | assert auth_info[0] == DockerAuthInfo( 13 | registry="https://index.docker.io/v1/", 14 | username="username", 15 | password="password", 16 | ) 17 | 18 | 19 | def test_parse_docker_auth_config_cred_helpers(): 20 | auth_dict = {"credHelpers": {".dkr.ecr..amazonaws.com": "ecr-login"}} 21 | auth_config_json = 
json.dumps(auth_dict) 22 | assert parse_docker_auth_config(auth_config_json) is None 23 | 24 | 25 | def test_parse_docker_auth_config_store(): 26 | auth_dict = {"credsStore": "ecr-login"} 27 | auth_config_json = json.dumps(auth_dict) 28 | assert parse_docker_auth_config(auth_config_json) is None 29 | 30 | 31 | def test_parse_docker_auth_config_encoded_multiple(): 32 | auth_dict = { 33 | "auths": { 34 | "localhost:5000": {"auth": "dXNlcjE6cGFzczE=="}, 35 | "https://example.com": {"auth": "dXNlcl9uZXc6cGFzc19uZXc=="}, 36 | "example2.com": {"auth": "YWJjOjEyMw==="}, 37 | } 38 | } 39 | auth_config_json = json.dumps(auth_dict) 40 | auth_info = parse_docker_auth_config(auth_config_json) 41 | assert auth_info 42 | assert len(auth_info) == 3 43 | assert auth_info[0] == DockerAuthInfo( 44 | registry="localhost:5000", 45 | username="user1", 46 | password="pass1", 47 | ) 48 | assert auth_info[1] == DockerAuthInfo( 49 | registry="https://example.com", 50 | username="user_new", 51 | password="pass_new", 52 | ) 53 | assert auth_info[2] == DockerAuthInfo( 54 | registry="example2.com", 55 | username="abc", 56 | password="123", 57 | ) 58 | 59 | 60 | def test_parse_docker_auth_config_unknown(): 61 | auth_config_str = '{"key": "value"}' 62 | assert parse_docker_auth_config(auth_config_str) is None 63 | 64 | 65 | def test_parse_docker_auth_config_error(): 66 | auth_config_str = "bad//string" 67 | with pytest.raises(ValueError): 68 | parse_docker_auth_config(auth_config_str) 69 | 70 | 71 | def test_parse_docker_auth_all(): 72 | test_dict = { 73 | "auths": { 74 | "localhost:5000": {"auth": "dXNlcjE6cGFzczE=="}, 75 | }, 76 | "credHelpers": {".dkr.ecr..amazonaws.com": "ecr-login"}, 77 | "credsStore": "ecr-login", 78 | } 79 | auth_config_json = json.dumps(test_dict) 80 | assert parse_docker_auth_config(auth_config_json) == [ 81 | DockerAuthInfo( 82 | registry="localhost:5000", 83 | username="user1", 84 | password="pass1", 85 | ) 86 | ] 87 | -------------------------------------------------------------------------------- /core/tests/test_core.py: -------------------------------------------------------------------------------- 1 | import tempfile 2 | from pathlib import Path 3 | 4 | from testcontainers.core.container import DockerContainer 5 | 6 | 7 | def test_garbage_collection_is_defensive(): 8 | # For more info, see https://github.com/testcontainers/testcontainers-python/issues/399 9 | # we simulate garbage collection: start, stop, then call `del` 10 | container = DockerContainer("postgres:latest") 11 | container.start() 12 | container.stop(force=True, delete_volume=True) 13 | delattr(container, "_container") 14 | del container 15 | 16 | 17 | def test_get_logs(): 18 | with DockerContainer("hello-world") as container: 19 | stdout, stderr = container.get_logs() 20 | assert isinstance(stdout, bytes) 21 | assert isinstance(stderr, bytes) 22 | assert "Hello from Docker".encode() in stdout, "There should be something on stdout" 23 | 24 | 25 | def test_docker_container_with_env_file(): 26 | """Test that environment variables can be loaded from a file""" 27 | with tempfile.TemporaryDirectory() as temp_directory: 28 | env_file_path = Path(temp_directory) / "env_file" 29 | with open(env_file_path, "w") as f: 30 | f.write( 31 | """ 32 | TEST_ENV_VAR=hello 33 | NUMBER=123 34 | DOMAIN=example.org 35 | ADMIN_EMAIL=admin@${DOMAIN} 36 | ROOT_URL=${DOMAIN}/app 37 | """ 38 | ) 39 | container = DockerContainer("alpine").with_command("tail -f /dev/null") # Keep the container running 40 | container.with_env_file(env_file_path) # Load 
the environment variables from the file 41 | with container: 42 | output = container.exec("env").output.decode("utf-8").strip() 43 | assert "TEST_ENV_VAR=hello" in output 44 | assert "NUMBER=123" in output 45 | assert "DOMAIN=example.org" in output 46 | assert "ADMIN_EMAIL=admin@example.org" in output 47 | assert "ROOT_URL=example.org/app" in output 48 | print(output) 49 | -------------------------------------------------------------------------------- /core/tests/test_labels.py: -------------------------------------------------------------------------------- 1 | from testcontainers.core.labels import ( 2 | LABEL_LANG, 3 | LABEL_SESSION_ID, 4 | LABEL_TESTCONTAINERS, 5 | LABEL_VERSION, 6 | create_labels, 7 | TESTCONTAINERS_NAMESPACE, 8 | ) 9 | import pytest 10 | from testcontainers.core.config import RYUK_IMAGE 11 | 12 | 13 | def assert_in_with_value(labels: dict[str, str], label: str, value: str, known_before_test_time: bool): 14 | assert label in labels 15 | if known_before_test_time: 16 | assert labels[label] == value 17 | 18 | 19 | testdata = [ 20 | (LABEL_LANG, "python", True), 21 | (LABEL_TESTCONTAINERS, "true", True), 22 | (LABEL_SESSION_ID, "some", False), 23 | (LABEL_VERSION, "some", False), 24 | ] 25 | 26 | 27 | @pytest.mark.parametrize("label,value,known_before_test_time", testdata) 28 | def test_containers_creates_expected_labels(label, value, known_before_test_time): 29 | actual_labels = create_labels("not-ryuk", None) 30 | assert_in_with_value(actual_labels, label, value, known_before_test_time) 31 | 32 | 33 | def test_containers_throws_on_namespace_collision(): 34 | with pytest.raises(ValueError): 35 | create_labels("not-ryuk", {TESTCONTAINERS_NAMESPACE: "fake"}) 36 | 37 | 38 | def test_containers_respect_custom_labels_if_no_collision(): 39 | custom_namespace = "org.foo.bar" 40 | value = "fake" 41 | actual_labels = create_labels("not-ryuk", {custom_namespace: value}) 42 | assert_in_with_value(actual_labels, custom_namespace, value, True) 43 | 44 | 45 | def test_if_ryuk_no_session(): 46 | actual_labels = create_labels(RYUK_IMAGE, None) 47 | assert LABEL_SESSION_ID not in actual_labels 48 | 49 | 50 | def test_session_are_module_import_scoped(): 51 | """ 52 | Asserts that sessions are a module-level variable and don't differ between invocation 53 | """ 54 | first_labels = create_labels("not-ryuk", None) 55 | second_labels = create_labels("not-ryuk", None) 56 | assert LABEL_SESSION_ID in first_labels 57 | assert LABEL_SESSION_ID in second_labels 58 | assert first_labels[LABEL_SESSION_ID] == second_labels[LABEL_SESSION_ID] 59 | 60 | 61 | def test_create_no_side_effects(): 62 | input_labels = {"key": "value"} 63 | expected_labels = input_labels.copy() 64 | create_labels("not-ryuk", input_labels) 65 | assert input_labels == expected_labels, input_labels 66 | -------------------------------------------------------------------------------- /core/tests/test_new_docker_api.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from testcontainers.core.container import DockerContainer 4 | 5 | 6 | def test_docker_custom_image(): 7 | container = DockerContainer("mysql:5.7.17") 8 | container.with_exposed_ports(3306) 9 | container.with_env("MYSQL_ROOT_PASSWORD", "root") 10 | 11 | with container: 12 | port = container.get_exposed_port(3306) 13 | assert int(port) > 0 14 | 15 | 16 | def test_docker_kwargs(): 17 | code_dir = Path(__file__).parent 18 | container_first = DockerContainer("nginx:latest") 19 | 
container_first.with_volume_mapping(code_dir, "/code") 20 | 21 | container_second = DockerContainer("nginx:latest") 22 | 23 | with container_first: 24 | container_second.with_kwargs(volumes_from=[container_first._container.short_id]) 25 | with container_second: 26 | files_first = container_first.exec("ls /code").output.decode("utf-8").strip() 27 | files_second = container_second.exec("ls /code").output.decode("utf-8").strip() 28 | assert files_first == files_second 29 | -------------------------------------------------------------------------------- /core/tests/test_ryuk.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | import pytest 3 | from pytest import MonkeyPatch 4 | 5 | from docker import DockerClient 6 | from docker.errors import NotFound 7 | 8 | from testcontainers.core.config import testcontainers_config 9 | from testcontainers.core.container import Reaper 10 | from testcontainers.core.container import DockerContainer 11 | from testcontainers.core.waiting_utils import wait_for_logs 12 | 13 | 14 | @pytest.mark.inside_docker_check 15 | def test_wait_for_reaper(monkeypatch: MonkeyPatch): 16 | Reaper.delete_instance() 17 | monkeypatch.setattr(testcontainers_config, "ryuk_reconnection_timeout", "0.1s") 18 | container = DockerContainer("hello-world") 19 | container.start() 20 | 21 | docker_client = container.get_docker_client().client 22 | 23 | container_id = container.get_wrapped_container().short_id 24 | reaper_id = Reaper._container.get_wrapped_container().short_id 25 | 26 | assert docker_client.containers.get(container_id) is not None 27 | assert docker_client.containers.get(reaper_id) is not None 28 | 29 | wait_for_logs(container, "Hello from Docker!") 30 | 31 | Reaper._socket.close() 32 | 33 | sleep(0.6) # Sleep until Ryuk reaps all dangling containers. 0.5 extra seconds for good measure. 
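# (The reconnection timeout was patched to 0.1s above; once Reaper._socket is closed, Ryuk
# should wait out that timeout, remove every container labelled with this session, and exit,
# which is what the NotFound assertions below verify.)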
34 | 35 | with pytest.raises(NotFound): 36 | docker_client.containers.get(container_id) 37 | with pytest.raises(NotFound): 38 | docker_client.containers.get(reaper_id) 39 | 40 | # Cleanup Ryuk class fields after manual Ryuk shutdown 41 | Reaper.delete_instance() 42 | 43 | 44 | @pytest.mark.inside_docker_check 45 | def test_container_without_ryuk(monkeypatch: MonkeyPatch): 46 | Reaper.delete_instance() 47 | monkeypatch.setattr(testcontainers_config, "ryuk_disabled", True) 48 | with DockerContainer("hello-world") as container: 49 | wait_for_logs(container, "Hello from Docker!") 50 | assert Reaper._instance is None 51 | 52 | 53 | @pytest.mark.inside_docker_check 54 | def test_ryuk_is_reused_in_same_process(): 55 | with DockerContainer("hello-world") as container: 56 | wait_for_logs(container, "Hello from Docker!") 57 | reaper_instance = Reaper._instance 58 | 59 | assert reaper_instance is not None 60 | 61 | with DockerContainer("hello-world") as container: 62 | wait_for_logs(container, "Hello from Docker!") 63 | assert reaper_instance is Reaper._instance 64 | -------------------------------------------------------------------------------- /core/tests/test_socat.py: -------------------------------------------------------------------------------- 1 | import httpx 2 | import pytest 3 | from testcontainers.core.container import DockerContainer 4 | from testcontainers.core.network import Network 5 | from testcontainers.socat.socat import SocatContainer 6 | 7 | 8 | def test_socat_with_helloworld(): 9 | with ( 10 | Network() as network, 11 | DockerContainer("testcontainers/helloworld:1.2.0") 12 | .with_exposed_ports(8080) 13 | .with_network(network) 14 | .with_network_aliases("helloworld"), 15 | SocatContainer().with_network(network).with_target(8080, "helloworld") as socat, 16 | ): 17 | socat_url = f"http://{socat.get_container_host_ip()}:{socat.get_exposed_port(8080)}" 18 | 19 | response = httpx.get(f"{socat_url}/ping") 20 | 21 | assert response.status_code == 200 22 | assert response.content == b"PONG" 23 | -------------------------------------------------------------------------------- /core/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | from pytest import MonkeyPatch, raises, mark 5 | 6 | from testcontainers.core import utils 7 | 8 | 9 | def test_setup_logger() -> None: 10 | assert utils.setup_logger("test") is not None 11 | 12 | 13 | @mark.parametrize("platform, expected", [("linux", "linux"), ("linux2", "linux"), ("darwin", "mac"), ("win32", "win")]) 14 | def test_os_name(monkeypatch: MonkeyPatch, platform: str, expected: str) -> None: 15 | assert utils.os_name() is not None 16 | monkeypatch.setattr("sys.platform", platform) 17 | assert utils.os_name() == expected 18 | 19 | 20 | def test_is_mac(monkeypatch: MonkeyPatch) -> None: 21 | monkeypatch.setattr("testcontainers.core.utils.os_name", lambda: "mac") 22 | assert utils.is_mac() 23 | 24 | 25 | def test_is_linux(monkeypatch: MonkeyPatch) -> None: 26 | monkeypatch.setattr("testcontainers.core.utils.os_name", lambda: "linux") 27 | assert utils.is_linux() 28 | 29 | 30 | def test_is_windows(monkeypatch: MonkeyPatch) -> None: 31 | monkeypatch.setattr("testcontainers.core.utils.os_name", lambda: "win") 32 | assert utils.is_windows() 33 | 34 | 35 | def test_is_arm(monkeypatch: MonkeyPatch) -> None: 36 | monkeypatch.setattr("platform.machine", lambda: "x86_64") 37 | assert not utils.is_arm() 38 | monkeypatch.setattr("platform.machine", 
lambda: "arm64") 39 | assert utils.is_arm() 40 | monkeypatch.setattr("platform.machine", lambda: "aarch64") 41 | assert utils.is_arm() 42 | 43 | 44 | def test_inside_container(monkeypatch: MonkeyPatch) -> None: 45 | assert not utils.inside_container() 46 | monkeypatch.setattr("os.path.exists", lambda _: True) 47 | assert utils.inside_container() 48 | 49 | 50 | def test_raise_for_deprecated_parameters() -> None: 51 | kwargs = {"key": "value"} 52 | current = "key" 53 | replacement = "new_key" 54 | with raises(ValueError) as e: 55 | result = utils.raise_for_deprecated_parameter(kwargs, current, replacement) 56 | assert str(e.value) == "Parameter 'deprecated' is deprecated and should be replaced by 'replacement'." 57 | assert result == {} 58 | 59 | 60 | @pytest.fixture 61 | def fake_cgroup(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: 62 | target = tmp_path / "cgroup" 63 | monkeypatch.setattr(utils, "CGROUP_FILE", target) 64 | return target 65 | 66 | 67 | def test_get_running_container_id_empty_or_missing(fake_cgroup: Path) -> None: 68 | # non existing does not fail but is only none 69 | assert utils.get_running_in_container_id() is None 70 | fake_cgroup.write_text("12:devices:/system.slice/sshd.service\n13:cpuset:\n") 71 | # missing docker does also not fail 72 | assert utils.get_running_in_container_id() is None 73 | 74 | 75 | def test_get_running_container_id(fake_cgroup: Path) -> None: 76 | container_id = "b78eebb08f89158ed6e2ed2fe" 77 | fake_cgroup.write_text(f"13:cpuset:/docker/{container_id}") 78 | assert utils.get_running_in_container_id() == container_id 79 | -------------------------------------------------------------------------------- /core/tests/test_version.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from packaging.version import InvalidVersion 3 | 4 | from testcontainers.core.version import ComparableVersion 5 | 6 | 7 | @pytest.fixture 8 | def version(): 9 | return ComparableVersion("1.0.0") 10 | 11 | 12 | @pytest.mark.parametrize("other_version, expected", [("0.9.0", False), ("1.0.0", False), ("1.1.0", True)]) 13 | def test_lt(version, other_version, expected): 14 | assert (version < other_version) == expected 15 | 16 | 17 | @pytest.mark.parametrize("other_version, expected", [("0.9.0", False), ("1.0.0", True), ("1.1.0", True)]) 18 | def test_le(version, other_version, expected): 19 | assert (version <= other_version) == expected 20 | 21 | 22 | @pytest.mark.parametrize("other_version, expected", [("0.9.0", False), ("1.0.0", True), ("1.1.0", False)]) 23 | def test_eq(version, other_version, expected): 24 | assert (version == other_version) == expected 25 | 26 | 27 | @pytest.mark.parametrize("other_version, expected", [("0.9.0", True), ("1.0.0", False), ("1.1.0", True)]) 28 | def test_ne(version, other_version, expected): 29 | assert (version != other_version) == expected 30 | 31 | 32 | @pytest.mark.parametrize("other_version, expected", [("0.9.0", True), ("1.0.0", False), ("1.1.0", False)]) 33 | def test_gt(version, other_version, expected): 34 | assert (version > other_version) == expected 35 | 36 | 37 | @pytest.mark.parametrize("other_version, expected", [("0.9.0", True), ("1.0.0", True), ("1.1.0", False)]) 38 | def test_ge(version, other_version, expected): 39 | assert (version >= other_version) == expected 40 | 41 | 42 | @pytest.mark.parametrize( 43 | "invalid_version", 44 | [ 45 | "invalid", 46 | "1..0", 47 | ], 48 | ) 49 | def test_invalid_version_raises_error(invalid_version): 50 | with 
pytest.raises(InvalidVersion): 51 | ComparableVersion(invalid_version) 52 | 53 | 54 | @pytest.mark.parametrize( 55 | "invalid_version", 56 | [ 57 | "invalid", 58 | "1..0", 59 | ], 60 | ) 61 | def test_comparison_with_invalid_version_raises_error(version, invalid_version): 62 | with pytest.raises(InvalidVersion): 63 | assert version < invalid_version 64 | 65 | with pytest.raises(InvalidVersion): 66 | assert version <= invalid_version 67 | 68 | with pytest.raises(InvalidVersion): 69 | assert version == invalid_version 70 | 71 | with pytest.raises(InvalidVersion): 72 | assert version != invalid_version 73 | 74 | with pytest.raises(InvalidVersion): 75 | assert version > invalid_version 76 | 77 | with pytest.raises(InvalidVersion): 78 | assert version >= invalid_version 79 | -------------------------------------------------------------------------------- /core/tests/test_waiting_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from testcontainers.core.container import DockerContainer 4 | from testcontainers.core.waiting_utils import wait_for_logs 5 | 6 | 7 | def test_wait_for_logs() -> None: 8 | with DockerContainer("hello-world") as container: 9 | wait_for_logs(container, "Hello from Docker!") 10 | 11 | 12 | def test_timeout_is_raised_when_waiting_for_logs() -> None: 13 | with pytest.raises(TimeoutError), DockerContainer("alpine").with_command("sleep 2") as container: 14 | wait_for_logs(container, "Hello from Docker!", timeout=1e-3) 15 | -------------------------------------------------------------------------------- /doctests/conf.py: -------------------------------------------------------------------------------- 1 | extensions = [ 2 | "sphinx.ext.autodoc", 3 | "sphinx.ext.doctest", 4 | ] 5 | master_doc = "README" 6 | -------------------------------------------------------------------------------- /modules/README.md: -------------------------------------------------------------------------------- 1 | # Modules 2 | 3 | The modules directory contains all the community-supported containers that see common use cases and merit their own easy-access container. 4 | -------------------------------------------------------------------------------- /modules/arangodb/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.arangodb.ArangoDbContainer 2 | .. title:: testcontainers.arangodb.ArangoDbContainer 3 | -------------------------------------------------------------------------------- /modules/aws/README.rst: -------------------------------------------------------------------------------- 1 | :code:`testcontainers-aws` is a set of AWS containers modules that can be used to create AWS containers. 2 | 3 | .. autoclass:: testcontainers.aws.AWSLambdaContainer 4 | .. 
title:: testcontainers.aws.AWSLambdaContainer 5 | 6 | The following environment variables are used by the AWS Lambda container: 7 | 8 | +-------------------------------+--------------------------+------------------------------+ 9 | | Env Variable | Default | Notes | 10 | +===============================+==========================+==============================+ 11 | | ``AWS_DEFAULT_REGION`` | ``us-west-1`` | Fetched from os environment | 12 | +-------------------------------+--------------------------+------------------------------+ 13 | | ``AWS_ACCESS_KEY_ID`` | ``testcontainers-aws`` | Fetched from os environment | 14 | +-------------------------------+--------------------------+------------------------------+ 15 | | ``AWS_SECRET_ACCESS_KEY`` | ``testcontainers-aws`` | Fetched from os environment | 16 | +-------------------------------+--------------------------+------------------------------+ 17 | 18 | Each one of the environment variables is expected to be set in the host machine where the test is running. 19 | 20 | Make sure you are using an image based on :code:`public.ecr.aws/lambda/python` 21 | 22 | Please checkout https://docs.aws.amazon.com/lambda/latest/dg/python-image.html for more information on how to run AWS Lambda functions locally. 23 | -------------------------------------------------------------------------------- /modules/aws/testcontainers/aws/__init__.py: -------------------------------------------------------------------------------- 1 | from .aws_lambda import AWSLambdaContainer # noqa: F401 2 | -------------------------------------------------------------------------------- /modules/aws/testcontainers/aws/aws_lambda.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Union 3 | 4 | import httpx 5 | 6 | from testcontainers.core.image import DockerImage 7 | from testcontainers.generic.server import ServerContainer 8 | 9 | RIE_PATH = "/2015-03-31/functions/function/invocations" 10 | # AWS OS-only base images contain an Amazon Linux distribution and the runtime interface emulator (RIE) for Lambda. 11 | 12 | # This comment can be removed (Used for testing) 13 | 14 | 15 | class AWSLambdaContainer(ServerContainer): 16 | """ 17 | AWS Lambda container that is based on a custom image. 18 | 19 | Example: 20 | 21 | .. doctest:: 22 | 23 | >>> from testcontainers.aws import AWSLambdaContainer 24 | >>> from testcontainers.core.waiting_utils import wait_for_logs 25 | >>> from testcontainers.core.image import DockerImage 26 | 27 | >>> with DockerImage(path="./modules/aws/tests/lambda_sample", tag="test-lambda:latest") as image: 28 | ... with AWSLambdaContainer(image=image, port=8080) as func: 29 | ... response = func.send_request(data={'payload': 'some data'}) 30 | ... assert response.status_code == 200 31 | ... assert "Hello from AWS Lambda using Python" in response.json() 32 | ... delay = wait_for_logs(func, "START RequestId:") 33 | 34 | :param image: Docker image to be used for the container. 35 | :param port: Port to be exposed on the container (default: 8080). 
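Requests sent via :code:`send_request` are POSTed to the Lambda runtime interface emulator, i.e. :code:`get_api_url()` is the container's connection URL joined with :code:`RIE_PATH` (:code:`/2015-03-31/functions/function/invocations`).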
36 | """ 37 | 38 | def __init__(self, image: Union[str, DockerImage], port: int = 8080) -> None: 39 | super().__init__(port, str(image)) 40 | self.with_env("AWS_DEFAULT_REGION", os.environ.get("AWS_DEFAULT_REGION", "us-west-1")) 41 | self.with_env("AWS_ACCESS_KEY_ID", os.environ.get("AWS_ACCESS_KEY_ID", "testcontainers-aws")) 42 | self.with_env("AWS_SECRET_ACCESS_KEY", os.environ.get("AWS_SECRET_ACCESS_KEY", "testcontainers-aws")) 43 | 44 | def get_api_url(self) -> str: 45 | return self._create_connection_url() + RIE_PATH 46 | 47 | def send_request(self, data: dict) -> httpx.Response: 48 | """ 49 | Send a request to the AWS Lambda function. 50 | 51 | :param data: Data to be sent to the AWS Lambda function. 52 | :return: Response from the AWS Lambda function. 53 | """ 54 | client = self.get_client() 55 | return client.post(self.get_api_url(), json=data) 56 | -------------------------------------------------------------------------------- /modules/aws/tests/lambda_sample/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.9 2 | 3 | RUN pip install boto3 4 | 5 | COPY lambda_function.py ${LAMBDA_TASK_ROOT} 6 | 7 | EXPOSE 8080 8 | 9 | # Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile) 10 | CMD [ "lambda_function.handler" ] 11 | -------------------------------------------------------------------------------- /modules/aws/tests/lambda_sample/lambda_function.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | def handler(event, context): 5 | return "Hello from AWS Lambda using Python" + sys.version + "!" 6 | -------------------------------------------------------------------------------- /modules/aws/tests/test_aws.py: -------------------------------------------------------------------------------- 1 | import re 2 | import os 3 | 4 | import pytest 5 | from unittest.mock import patch 6 | 7 | from testcontainers.core.image import DockerImage 8 | from testcontainers.aws import AWSLambdaContainer 9 | from testcontainers.aws.aws_lambda import RIE_PATH 10 | 11 | DOCKER_FILE_PATH = "./modules/aws/tests/lambda_sample" 12 | IMAGE_TAG = "lambda:test" 13 | 14 | 15 | def test_aws_lambda_container(): 16 | with DockerImage(path=DOCKER_FILE_PATH, tag="test-lambda:latest") as image: 17 | with AWSLambdaContainer(image=image, port=8080) as func: 18 | assert func.get_container_host_ip() == "localhost" 19 | assert func.internal_port == 8080 20 | assert func.env["AWS_DEFAULT_REGION"] == "us-west-1" 21 | assert func.env["AWS_ACCESS_KEY_ID"] == "testcontainers-aws" 22 | assert func.env["AWS_SECRET_ACCESS_KEY"] == "testcontainers-aws" 23 | assert re.match(rf"http://localhost:\d+{RIE_PATH}", func.get_api_url()) 24 | response = func.send_request(data={"payload": "test"}) 25 | assert response.status_code == 200 26 | assert "Hello from AWS Lambda using Python" in response.json() 27 | for log_str in ["START RequestId", "END RequestId", "REPORT RequestId"]: 28 | assert log_str in func.get_stdout() 29 | 30 | 31 | def test_aws_lambda_container_external_env_vars(): 32 | vars = { 33 | "AWS_DEFAULT_REGION": "region", 34 | "AWS_ACCESS_KEY_ID": "id", 35 | "AWS_SECRET_ACCESS_KEY": "key", 36 | } 37 | with patch.dict(os.environ, vars): 38 | with DockerImage(path=DOCKER_FILE_PATH, tag="test-lambda-env-vars:latest") as image: 39 | with AWSLambdaContainer(image=image, port=8080) as func: 40 | assert func.env["AWS_DEFAULT_REGION"] == "region" 41 | 
assert func.env["AWS_ACCESS_KEY_ID"] == "id" 42 | assert func.env["AWS_SECRET_ACCESS_KEY"] == "key" 43 | 44 | 45 | def test_aws_lambda_container_no_port(): 46 | with DockerImage(path=DOCKER_FILE_PATH, tag="test-lambda-no-port:latest") as image: 47 | with AWSLambdaContainer(image=image) as func: 48 | response = func.send_request(data={"payload": "test"}) 49 | assert response.status_code == 200 50 | 51 | 52 | def test_aws_lambda_container_no_path(): 53 | with pytest.raises(TypeError): 54 | with DockerImage(path=DOCKER_FILE_PATH, tag="test-lambda-no-path:latest") as image: 55 | with AWSLambdaContainer() as func: # noqa: F841 56 | pass 57 | -------------------------------------------------------------------------------- /modules/azurite/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.azurite.AzuriteContainer 2 | .. title:: testcontainers.azurite.AzuriteContainer 3 | -------------------------------------------------------------------------------- /modules/azurite/tests/test_azurite.py: -------------------------------------------------------------------------------- 1 | from azure.storage.blob import BlobServiceClient 2 | 3 | from testcontainers.azurite import AzuriteContainer 4 | 5 | 6 | def test_docker_run_azurite(): 7 | with AzuriteContainer() as azurite_container: 8 | blob_service_client = BlobServiceClient.from_connection_string( 9 | azurite_container.get_connection_string(), api_version="2019-12-12" 10 | ) 11 | 12 | blob_service_client.create_container("test-container") 13 | -------------------------------------------------------------------------------- /modules/cassandra/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.cassandra.CassandraContainer 2 | .. title:: testcontainers.cassandra.CassandraContainer 3 | -------------------------------------------------------------------------------- /modules/cassandra/testcontainers/cassandra/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | from testcontainers.core.container import DockerContainer 14 | from testcontainers.core.waiting_utils import wait_for_logs 15 | 16 | 17 | class CassandraContainer(DockerContainer): 18 | """ 19 | Cassandra database container. 20 | 21 | Example: 22 | 23 | .. doctest:: 24 | 25 | >>> from testcontainers.cassandra import CassandraContainer 26 | >>> from cassandra.cluster import Cluster, DCAwareRoundRobinPolicy 27 | 28 | >>> with CassandraContainer("cassandra:4.1.4") as cassandra, Cluster( 29 | ... cassandra.get_contact_points(), 30 | ... load_balancing_policy=DCAwareRoundRobinPolicy(cassandra.get_local_datacenter()), 31 | ... ) as cluster: 32 | ... session = cluster.connect() 33 | ... result = session.execute("SELECT release_version FROM system.local;") 34 | ... 
result.one().release_version 35 | '4.1.4' 36 | """ 37 | 38 | CQL_PORT = 9042 39 | DEFAULT_LOCAL_DATACENTER = "datacenter1" 40 | 41 | def __init__(self, image: str = "cassandra:latest", **kwargs) -> None: 42 | super().__init__(image=image, **kwargs) 43 | self.with_exposed_ports(self.CQL_PORT) 44 | self.with_env("JVM_OPTS", "-Dcassandra.skip_wait_for_gossip_to_settle=0 -Dcassandra.initial_token=0") 45 | self.with_env("HEAP_NEWSIZE", "128M") 46 | self.with_env("MAX_HEAP_SIZE", "1024M") 47 | self.with_env("CASSANDRA_ENDPOINT_SNITCH", "GossipingPropertyFileSnitch") 48 | self.with_env("CASSANDRA_DC", self.DEFAULT_LOCAL_DATACENTER) 49 | 50 | def _connect(self): 51 | wait_for_logs(self, "Startup complete") 52 | 53 | def start(self) -> "CassandraContainer": 54 | super().start() 55 | self._connect() 56 | return self 57 | 58 | def get_contact_points(self) -> list[tuple[str, int]]: 59 | return [(self.get_container_host_ip(), int(self.get_exposed_port(self.CQL_PORT)))] 60 | 61 | def get_local_datacenter(self) -> str: 62 | return self.env.get("CASSANDRA_DC", self.DEFAULT_LOCAL_DATACENTER) 63 | -------------------------------------------------------------------------------- /modules/cassandra/tests/test_cassandra.py: -------------------------------------------------------------------------------- 1 | from cassandra.cluster import Cluster, DCAwareRoundRobinPolicy 2 | 3 | from testcontainers.cassandra import CassandraContainer 4 | 5 | 6 | def test_docker_run_cassandra(): 7 | with CassandraContainer("cassandra:4.1.4") as cassandra: 8 | cluster = Cluster( 9 | cassandra.get_contact_points(), 10 | load_balancing_policy=DCAwareRoundRobinPolicy(cassandra.get_local_datacenter()), 11 | ) 12 | session = cluster.connect() 13 | result = session.execute("SELECT release_version FROM system.local;") 14 | assert result.one().release_version == "4.1.4" 15 | -------------------------------------------------------------------------------- /modules/chroma/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.chroma.ChromaContainer 2 | .. title:: testcontainers.minio.ChromaContainer 3 | -------------------------------------------------------------------------------- /modules/chroma/testcontainers/chroma/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | from requests import ConnectionError, get 4 | 5 | from testcontainers.core.container import DockerContainer 6 | from testcontainers.core.utils import raise_for_deprecated_parameter 7 | from testcontainers.core.waiting_utils import wait_container_is_ready 8 | 9 | if TYPE_CHECKING: 10 | from requests import Response 11 | 12 | 13 | class ChromaContainer(DockerContainer): 14 | """ 15 | The example below spins up a ChromaDB container, performs a healthcheck and creates a collection. 16 | The method :code:`get_client` can be used to create a client for the Chroma Python Client. 17 | 18 | Example: 19 | 20 | .. doctest:: 21 | 22 | >>> import chromadb 23 | >>> from testcontainers.chroma import ChromaContainer 24 | 25 | >>> with ChromaContainer() as chroma: 26 | ... config = chroma.get_config() 27 | ... client = chromadb.HttpClient(host=config["host"], port=config["port"]) 28 | ... col = client.get_or_create_collection("test") 29 | ... 
col.name 30 | 'test' 31 | """ 32 | 33 | def __init__( 34 | self, 35 | image: str = "chromadb/chroma:latest", 36 | port: int = 8000, 37 | **kwargs, 38 | ) -> None: 39 | """ 40 | Args: 41 | image: Docker image to use for the Chroma container. 42 | port: Port to expose on the container. 43 | **kwargs: Additional keyword arguments passed on to the 44 | DockerContainer base class. 45 | """ 46 | raise_for_deprecated_parameter(kwargs, "port_to_expose", "port") 47 | super().__init__(image, **kwargs) 48 | self.port = port 49 | 50 | self.with_exposed_ports(self.port) 51 | # self.with_command(f"server /data --address :{self.port}") 52 | 53 | def get_config(self) -> dict: 54 | """This method returns the configuration of the Chroma container, 55 | including the endpoint. 56 | 57 | Returns: 58 | dict: {`endpoint`: str} 59 | """ 60 | host_ip = self.get_container_host_ip() 61 | exposed_port = self.get_exposed_port(self.port) 62 | return { 63 | "endpoint": f"{host_ip}:{exposed_port}", 64 | "host": host_ip, 65 | "port": exposed_port, 66 | } 67 | 68 | @wait_container_is_ready(ConnectionError) 69 | def _healthcheck(self) -> None: 70 | """This is an internal method used to check if the Chroma container 71 | is healthy and ready to receive requests.""" 72 | url = f"http://{self.get_config()['endpoint']}/api/v1/heartbeat" 73 | response: Response = get(url) 74 | response.raise_for_status() 75 | 76 | def start(self) -> "ChromaContainer": 77 | """This method starts the Chroma container and runs the healthcheck 78 | to verify that the container is ready to use.""" 79 | super().start() 80 | self._healthcheck() 81 | return self 82 | -------------------------------------------------------------------------------- /modules/chroma/tests/test_chroma.py: -------------------------------------------------------------------------------- 1 | from testcontainers.chroma import ChromaContainer 2 | import chromadb 3 | 4 | 5 | def test_docker_run_chroma(): 6 | with ChromaContainer(image="chromadb/chroma:0.4.24") as chroma: 7 | client = chromadb.HttpClient(host=chroma.get_config()["host"], port=chroma.get_config()["port"]) 8 | col = client.get_or_create_collection("test") 9 | assert col.name == "test" 10 | -------------------------------------------------------------------------------- /modules/clickhouse/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.clickhouse.ClickHouseContainer 2 | .. title:: testcontainers.clickhouse.ClickHouseContainer 3 | -------------------------------------------------------------------------------- /modules/clickhouse/testcontainers/clickhouse/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License.
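# Note: ClickHouseContainer exposes both the native TCP port (9000 by default, used in the
# connection URL) and the HTTP interface on port 8123; readiness is determined by polling
# the HTTP interface until it responds with "Ok".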
13 | import os 14 | from typing import Optional 15 | from urllib.error import HTTPError, URLError 16 | from urllib.request import urlopen 17 | 18 | from testcontainers.core.generic import DbContainer 19 | from testcontainers.core.utils import raise_for_deprecated_parameter 20 | from testcontainers.core.waiting_utils import wait_container_is_ready 21 | 22 | 23 | class ClickHouseContainer(DbContainer): 24 | """ 25 | ClickHouse database container. 26 | 27 | Example: 28 | 29 | The example spins up a ClickHouse database and connects to it using the 30 | :code:`clickhouse-driver`. 31 | 32 | .. doctest:: 33 | 34 | >>> import clickhouse_driver 35 | >>> from testcontainers.clickhouse import ClickHouseContainer 36 | 37 | >>> with ClickHouseContainer("clickhouse/clickhouse-server:21.8") as clickhouse: 38 | ... client = clickhouse_driver.Client.from_url(clickhouse.get_connection_url()) 39 | ... client.execute("select 'working'") 40 | [('working',)] 41 | """ 42 | 43 | def __init__( 44 | self, 45 | image: str = "clickhouse/clickhouse-server:latest", 46 | port: int = 9000, 47 | username: Optional[str] = None, 48 | password: Optional[str] = None, 49 | dbname: Optional[str] = None, 50 | **kwargs, 51 | ) -> None: 52 | raise_for_deprecated_parameter(kwargs, "user", "username") 53 | super().__init__(image=image, **kwargs) 54 | self.username = username or os.environ.get("CLICKHOUSE_USER", "test") 55 | self.password = password or os.environ.get("CLICKHOUSE_PASSWORD", "test") 56 | self.dbname = dbname or os.environ.get("CLICKHOUSE_DB", "test") 57 | self.port = port 58 | self.with_exposed_ports(self.port) 59 | self.with_exposed_ports(8123) 60 | 61 | @wait_container_is_ready(HTTPError, URLError) 62 | def _connect(self) -> None: 63 | # noinspection HttpUrlsUsage 64 | url = f"http://{self.get_container_host_ip()}:{self.get_exposed_port(8123)}" 65 | with urlopen(url) as r: 66 | assert b"Ok" in r.read() 67 | 68 | def _configure(self) -> None: 69 | self.with_env("CLICKHOUSE_USER", self.username) 70 | self.with_env("CLICKHOUSE_PASSWORD", self.password) 71 | self.with_env("CLICKHOUSE_DB", self.dbname) 72 | 73 | def get_connection_url(self, host: Optional[str] = None) -> str: 74 | return self._create_connection_url( 75 | dialect="clickhouse", 76 | username=self.username, 77 | password=self.password, 78 | dbname=self.dbname, 79 | host=host, 80 | port=self.port, 81 | ) 82 | -------------------------------------------------------------------------------- /modules/clickhouse/tests/test_clickhouse.py: -------------------------------------------------------------------------------- 1 | import clickhouse_driver 2 | 3 | from testcontainers.clickhouse import ClickHouseContainer 4 | 5 | 6 | def test_docker_run_clickhouse(): 7 | clickhouse_container = ClickHouseContainer() 8 | with clickhouse_container as clickhouse: 9 | client = clickhouse_driver.Client.from_url(clickhouse.get_connection_url()) 10 | result = client.execute("select 'working'") 11 | 12 | assert result == [("working",)] 13 | -------------------------------------------------------------------------------- /modules/cockroachdb/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.cockroachdb.CockroachDBContainer 2 | .. 
title:: testcontainers.cockroachdb.CockroachDBContainer 3 | -------------------------------------------------------------------------------- /modules/cockroachdb/tests/test_cockroachdb.py: -------------------------------------------------------------------------------- 1 | import sqlalchemy 2 | 3 | from testcontainers.cockroachdb import CockroachDBContainer 4 | 5 | 6 | def test_docker_run_mysql(): 7 | config = CockroachDBContainer("cockroachdb/cockroach:v24.1.1") 8 | with config as crdb: 9 | engine = sqlalchemy.create_engine(crdb.get_connection_url()) 10 | with engine.begin() as connection: 11 | result = connection.execute(sqlalchemy.text("select version()")) 12 | for row in result: 13 | assert "CockroachDB" in row[0] 14 | assert "v24.1.1" in row[0] 15 | -------------------------------------------------------------------------------- /modules/cosmosdb/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.cosmosdb.CosmosDBMongoEndpointContainer 2 | .. title:: testcontainers.cosmosdb.CosmosDBMongoEndpointContainer 3 | 4 | .. autoclass:: testcontainers.cosmosdb.CosmosDBNoSQLEndpointContainer 5 | .. title:: testcontainers.cosmosdb.CosmosDBNoSQLEndpointContainer 6 | -------------------------------------------------------------------------------- /modules/cosmosdb/testcontainers/cosmosdb/__init__.py: -------------------------------------------------------------------------------- 1 | from .mongodb import CosmosDBMongoEndpointContainer 2 | from .nosql import CosmosDBNoSQLEndpointContainer 3 | 4 | __all__ = ["CosmosDBMongoEndpointContainer", "CosmosDBNoSQLEndpointContainer"] 5 | -------------------------------------------------------------------------------- /modules/cosmosdb/testcontainers/cosmosdb/_grab.py: -------------------------------------------------------------------------------- 1 | import tarfile 2 | import tempfile 3 | from contextlib import contextmanager 4 | from os import path 5 | from pathlib import Path 6 | 7 | from docker.models.containers import Container 8 | 9 | 10 | @contextmanager 11 | def file(container: Container, target: str): 12 | target_path = Path(target) 13 | assert target_path.is_absolute(), "target must be an absolute path" 14 | 15 | with tempfile.TemporaryDirectory() as tmp: 16 | archive = Path(tmp) / "grabbed.tar" 17 | 18 | # download from container as tar archive 19 | with open(archive, "wb") as f: 20 | tar_bits, _ = container.get_archive(target) 21 | for chunk in tar_bits: 22 | f.write(chunk) 23 | 24 | # extract target file from tar archive 25 | with tarfile.TarFile(archive) as tar: 26 | yield tar.extractfile(path.basename(target)) 27 | -------------------------------------------------------------------------------- /modules/cosmosdb/testcontainers/cosmosdb/mongodb.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from ._emulator import CosmosDBEmulatorContainer 4 | 5 | __all__ = ["CosmosDBMongoEndpointContainer"] 6 | 7 | ENDPOINT_PORT = 10255 8 | 9 | 10 | class CosmosDBMongoEndpointContainer(CosmosDBEmulatorContainer): 11 | """ 12 | CosmosDB MongoDB enpoint Emulator. 13 | 14 | Example: 15 | 16 | .. code-block:: python 17 | 18 | >>> from testcontainers.cosmosdb import CosmosDBMongoEndpointContainer 19 | 20 | >>> with CosmosDBMongoEndpointContainer(mongodb_version="4.0") as emulator: 21 | ... print(f"Point your MongoDB client at {emulator.host}:{emulator.port} using key {emulator.key}") 22 | ... 
print(f"and eiher disable TLS server auth or trust the server's self signed cert (emulator.server_certificate_pem)") 23 | 24 | """ 25 | 26 | def __init__( 27 | self, 28 | mongodb_version: str, 29 | image: str = os.getenv( 30 | "AZURE_COSMOS_EMULATOR_IMAGE", "mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator:mongodb" 31 | ), 32 | **other_kwargs, 33 | ): 34 | super().__init__(image=image, endpoint_ports=[ENDPOINT_PORT], **other_kwargs) 35 | assert mongodb_version is not None, "A MongoDB version is required to use the MongoDB Endpoint" 36 | self.mongodb_version = mongodb_version 37 | 38 | @property 39 | def port(self) -> str: 40 | """ 41 | The exposed port to the MongoDB endpoint 42 | """ 43 | return self.get_exposed_port(ENDPOINT_PORT) 44 | 45 | def _configure(self) -> None: 46 | super()._configure() 47 | self.with_env("AZURE_COSMOS_EMULATOR_ENABLE_MONGODB_ENDPOINT", self.mongodb_version) 48 | -------------------------------------------------------------------------------- /modules/cosmosdb/testcontainers/cosmosdb/nosql.py: -------------------------------------------------------------------------------- 1 | from azure.core.exceptions import ServiceRequestError 2 | from azure.cosmos import CosmosClient as SyncCosmosClient 3 | from azure.cosmos.aio import CosmosClient as AsyncCosmosClient 4 | 5 | from testcontainers.core.waiting_utils import wait_container_is_ready 6 | 7 | from ._emulator import CosmosDBEmulatorContainer 8 | 9 | __all__ = ["CosmosDBNoSQLEndpointContainer"] 10 | 11 | NOSQL_PORT = 8081 12 | 13 | 14 | class CosmosDBNoSQLEndpointContainer(CosmosDBEmulatorContainer): 15 | """ 16 | CosmosDB NoSQL enpoint Emulator. 17 | 18 | Example: 19 | 20 | .. code-block:: python 21 | 22 | >>> from testcontainers.cosmosdb import CosmosDBNoSQLEndpointContainer 23 | >>> with CosmosDBNoSQLEndpointContainer() as emulator: 24 | ... db = emulator.insecure_sync_client().create_database_if_not_exists("test") 25 | 26 | .. code-block:: python 27 | 28 | >>> from testcontainers.cosmosdb import CosmosDBNoSQLEndpointContainer 29 | >>> from azure.cosmos import CosmosClient 30 | 31 | >>> with CosmosDBNoSQLEndpointContainer() as emulator: 32 | ... client = CosmosClient(url=emulator.url, credential=emulator.key, connection_verify=False) 33 | ... 
db = client.create_database_if_not_exists("test") 34 | 35 | """ 36 | 37 | def __init__(self, **kwargs): 38 | super().__init__(endpoint_ports=[NOSQL_PORT], **kwargs) 39 | 40 | @property 41 | def port(self) -> str: 42 | """ 43 | The exposed port to the NoSQL endpoint 44 | """ 45 | return self.get_exposed_port(NOSQL_PORT) 46 | 47 | @property 48 | def url(self) -> str: 49 | """ 50 | The url to the NoSQL endpoint 51 | """ 52 | return f"https://{self.host}:{self.port}" 53 | 54 | def insecure_async_client(self): 55 | """ 56 | Returns an asynchronous CosmosClient instance 57 | """ 58 | return AsyncCosmosClient(url=self.url, credential=self.key, connection_verify=False) 59 | 60 | def insecure_sync_client(self): 61 | """ 62 | Returns a synchronous CosmosClient instance 63 | """ 64 | return SyncCosmosClient(url=self.url, credential=self.key, connection_verify=False) 65 | 66 | @wait_container_is_ready(ServiceRequestError) 67 | def _wait_for_query_success(self) -> None: 68 | with self.insecure_sync_client() as c: 69 | list(c.list_databases()) 70 | -------------------------------------------------------------------------------- /modules/cosmosdb/tests/test_cosmosdb_emulator.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testcontainers.cosmosdb._emulator import CosmosDBEmulatorContainer 3 | 4 | 5 | def test_runs(): 6 | with CosmosDBEmulatorContainer(partition_count=1, bind_ports=False) as emulator: 7 | assert emulator.server_certificate_pem is not None 8 | assert emulator.get_exposed_port(8081) is not None 9 | -------------------------------------------------------------------------------- /modules/cosmosdb/tests/test_cosmosdb_mongodb.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testcontainers.cosmosdb import CosmosDBMongoEndpointContainer 3 | 4 | 5 | def test_requires_a_version(): 6 | with pytest.raises(AssertionError, match="A MongoDB version is required"): 7 | CosmosDBMongoEndpointContainer(mongodb_version=None) 8 | 9 | # instanciates 10 | CosmosDBMongoEndpointContainer(mongodb_version="4.0") 11 | 12 | 13 | def test_runs(): 14 | with CosmosDBMongoEndpointContainer(mongodb_version="4.0", partition_count=1, bind_ports=False) as emulator: 15 | assert emulator.env["AZURE_COSMOS_EMULATOR_ENABLE_MONGODB_ENDPOINT"] == "4.0" 16 | assert emulator.get_exposed_port(10255) is not None, "The MongoDB endpoint's port should be exposed" 17 | -------------------------------------------------------------------------------- /modules/cosmosdb/tests/test_cosmosdb_nosql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testcontainers.cosmosdb import CosmosDBNoSQLEndpointContainer 3 | 4 | 5 | def test_runs(): 6 | with CosmosDBNoSQLEndpointContainer(partition_count=1, bind_ports=False) as emulator: 7 | assert emulator.get_exposed_port(8081) is not None, "The NoSQL endpoint's port should be exposed" 8 | -------------------------------------------------------------------------------- /modules/db2/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.db2.Db2Container 2 | .. 
title:: testcontainers.db2.Db2Container 3 | -------------------------------------------------------------------------------- /modules/db2/testcontainers/db2/__init__.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from typing import Optional 3 | 4 | from testcontainers.core.generic import DbContainer 5 | from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs 6 | 7 | 8 | class Db2Container(DbContainer): 9 | """ 10 | IBM Db2 database container. 11 | 12 | Example: 13 | 14 | .. doctest:: 15 | 16 | >>> import sqlalchemy 17 | >>> from testcontainers.db2 import Db2Container 18 | 19 | >>> with Db2Container("icr.io/db2_community/db2:latest") as db2: 20 | ... engine = sqlalchemy.create_engine(db2.get_connection_url()) 21 | ... with engine.begin() as connection: 22 | ... result = connection.execute(sqlalchemy.text("select service_level from sysibmadm.env_inst_info")) 23 | """ 24 | 25 | def __init__( 26 | self, 27 | image: str = "icr.io/db2_community/db2:latest", 28 | username: str = "db2inst1", 29 | password: Optional[str] = None, 30 | port: int = 50000, 31 | dbname: str = "testdb", 32 | dialect: str = "db2+ibm_db", 33 | **kwargs, 34 | ) -> None: 35 | super().__init__(image, **kwargs) 36 | 37 | self.port = port 38 | self.with_exposed_ports(self.port) 39 | 40 | self.password = password or environ.get("DB2_PASSWORD", "password") 41 | self.username = username 42 | self.dbname = dbname 43 | self.dialect = dialect 44 | 45 | def _configure(self) -> None: 46 | self.with_env("LICENSE", "accept") 47 | self.with_env("DB2INSTANCE", self.username) 48 | self.with_env("DB2INST1_PASSWORD", self.password) 49 | self.with_env("DBNAME", self.dbname) 50 | self.with_env("ARCHIVE_LOGS", "false") 51 | self.with_env("AUTOCONFIG", "false") 52 | self.with_kwargs(privileged=True) 53 | 54 | @wait_container_is_ready() 55 | def _connect(self) -> None: 56 | wait_for_logs(self, predicate="Setup has completed") 57 | 58 | def get_connection_url(self) -> str: 59 | return super()._create_connection_url( 60 | dialect=self.dialect, username=self.username, password=self.password, dbname=self.dbname, port=self.port 61 | ) 62 | -------------------------------------------------------------------------------- /modules/db2/tests/test_db2.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | import sqlalchemy 5 | 6 | from testcontainers.core.utils import is_arm 7 | from testcontainers.db2 import Db2Container 8 | 9 | 10 | @pytest.mark.skipif(is_arm(), reason="db2 container not available for ARM") 11 | @pytest.mark.parametrize("version", ["11.5.9.0", "11.5.8.0"]) 12 | def test_docker_run_db2(version: str): 13 | with Db2Container(f"icr.io/db2_community/db2:{version}", password="password") as db2: 14 | engine = sqlalchemy.create_engine(db2.get_connection_url()) 15 | with engine.begin() as connection: 16 | result = connection.execute(sqlalchemy.text("select service_level from sysibmadm.env_inst_info")) 17 | for row in result: 18 | assert row[0] == f"DB2 v{version}" 19 | 20 | 21 | # This is a feature in the generic DbContainer class 22 | # but it can't be tested on its own 23 | # so is tested in various database modules: 24 | # - mysql / mariadb 25 | # - postgresql 26 | # - sqlserver 27 | # - mongodb 28 | # - db2 29 | def test_quoted_password(): 30 | user = "db2inst1" 31 | dbname = "testdb" 32 | password = "p@$%25+0&%rd :/!=?" 
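# For reference: the expected value below is what urllib.parse.quote(password, safe=" +")
# yields, i.e. spaces and "+" stay as-is while the other reserved characters are
# percent-encoded (the escaping itself happens inside DbContainer when it builds the URL).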
33 | quoted_password = "p%40%24%2525+0%26%25rd %3A%2F%21%3D%3F" 34 | kwargs = { 35 | "username": user, 36 | "password": password, 37 | "dbname": dbname, 38 | } 39 | with Db2Container("icr.io/db2_community/db2:11.5.9.0", **kwargs) as container: 40 | port = container.get_exposed_port(50000) 41 | host = container.get_container_host_ip() 42 | expected_url = f"db2+ibm_db://{user}:{quoted_password}@{host}:{port}/{dbname}" 43 | assert expected_url == container.get_connection_url() 44 | -------------------------------------------------------------------------------- /modules/elasticsearch/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.elasticsearch.ElasticSearchContainer 2 | .. title:: testcontainers.elasticsearch.ElasticSearchContainer 3 | -------------------------------------------------------------------------------- /modules/elasticsearch/tests/test_elasticsearch.py: -------------------------------------------------------------------------------- 1 | import json 2 | import urllib.request 3 | 4 | import pytest 5 | 6 | from testcontainers.elasticsearch import ElasticSearchContainer 7 | 8 | 9 | # The versions below should reflect the latest stable releases 10 | @pytest.mark.parametrize("version", ["7.17.18", "8.12.2"]) 11 | def test_docker_run_elasticsearch(version): 12 | with ElasticSearchContainer(f"elasticsearch:{version}", mem_limit="3G") as es: 13 | resp = urllib.request.urlopen(es.get_url()) 14 | assert json.loads(resp.read().decode())["version"]["number"] == version 15 | -------------------------------------------------------------------------------- /modules/generic/README.rst: -------------------------------------------------------------------------------- 1 | :code:`testcontainers-generic` is a set of generic container modules that can be used to create containers. 2 | 3 | .. autoclass:: testcontainers.generic.ServerContainer 4 | .. title:: testcontainers.generic.ServerContainer 5 | 6 | A FastAPI container that uses :code:`ServerContainer`: 7 | 8 | .. doctest:: 9 | 10 | >>> from testcontainers.generic import ServerContainer 11 | >>> from testcontainers.core.waiting_utils import wait_for_logs 12 | >>> from testcontainers.core.image import DockerImage 13 | >>> with DockerImage(path="./modules/generic/tests/samples/fastapi", tag="fastapi-test:latest") as image: 14 | ... with ServerContainer(port=80, image=image) as fastapi_server: 15 | ... delay = wait_for_logs(fastapi_server, "Uvicorn running on http://0.0.0.0:80") 16 | ... fastapi_server.get_api_url = lambda: fastapi_server._create_connection_url() + "/api/v1/" 17 | ... client = fastapi_server.get_client() 18 | ... response = client.get("/") 19 | ... assert response.status_code == 200 20 | ... assert response.json() == {"Status": "Working"} 21 | 22 | A more advanced use case, where a FastAPI container uses a Redis container: 23 | 24 | .. doctest:: 25 | 26 | >>> from testcontainers.redis import RedisContainer 27 | >>> from testcontainers.generic import ServerContainer 28 | 29 | >>> with RedisContainer() as redis: 30 | ... redis_container_port = redis.port 31 | ... redis_container_ip_address = redis.get_docker_client().bridge_ip(redis._container.id) 32 | 33 | ... with DockerImage(path="./modules/generic/tests/samples/advance_1", tag="advance-1:latest") as image: 34 | ... web_server = ServerContainer(port=80, image=image) 35 | ... web_server.with_env(key="REDIS_HOST", value=redis_container_ip_address) 36 | ... web_server.with_env(key="REDIS_PORT", value=redis_container_port) 37 | 38 | ...
with web_server: 39 | ... web_server.get_api_url = lambda: web_server._create_connection_url() 40 | ... client = web_server.get_client() 41 | 42 | ... response = client.get("/") 43 | ... assert response.status_code == 200, "Server request failed" 44 | ... assert response.json() == {"Status": "ok"} 45 | 46 | ... test_data = {"key": "test_key", "value": "test_value"} 47 | ... response = client.post("/set", params=test_data) 48 | ... assert response.status_code == 200, "Failed to set data" 49 | 50 | ... response = client.get(f"/get/{test_data['key']}") 51 | ... assert response.status_code == 200, "Failed to get data" 52 | ... assert response.json() == {"key": test_data["key"], "value": test_data["value"]} 53 | -------------------------------------------------------------------------------- /modules/generic/testcontainers/generic/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import ServerContainer # noqa: F401 2 | -------------------------------------------------------------------------------- /modules/generic/testcontainers/generic/server.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from urllib.error import HTTPError, URLError 3 | from urllib.request import urlopen 4 | 5 | import httpx 6 | 7 | from testcontainers.core.container import DockerContainer 8 | from testcontainers.core.exceptions import ContainerStartException 9 | from testcontainers.core.image import DockerImage 10 | from testcontainers.core.waiting_utils import wait_container_is_ready 11 | 12 | # This comment can be removed (Used for testing) 13 | 14 | 15 | class ServerContainer(DockerContainer): 16 | """ 17 | Container for a generic server that is based on a custom image. 18 | 19 | Example: 20 | 21 | .. doctest:: 22 | 23 | >>> import httpx 24 | >>> from testcontainers.generic import ServerContainer 25 | >>> from testcontainers.core.waiting_utils import wait_for_logs 26 | >>> from testcontainers.core.image import DockerImage 27 | 28 | >>> with DockerImage(path="./modules/generic/tests/samples/python_server", tag="test-srv:latest") as image: 29 | ... with ServerContainer(port=9000, image=image) as srv: 30 | ... url = srv._create_connection_url() 31 | ... response = httpx.get(f"{url}", timeout=5) 32 | ... assert response.status_code == 200, "Response status code is not 200" 33 | ... delay = wait_for_logs(srv, "GET / HTTP/1.1") 34 | 35 | 36 | :param port: Port to be exposed on the container. 37 | :param image: Docker image to be used for the container. 
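Note that :code:`get_api_url` is deliberately unimplemented here and raises :code:`NotImplementedError`; subclasses (or callers, as in the README examples that assign it directly) are expected to provide it, since :code:`get_client` builds an :code:`httpx.Client` bound to that URL.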
38 | """ 39 | 40 | def __init__(self, port: int, image: Union[str, DockerImage]) -> None: 41 | super().__init__(str(image)) 42 | self.internal_port = port 43 | self.with_exposed_ports(self.internal_port) 44 | 45 | @wait_container_is_ready(HTTPError, URLError) 46 | def _connect(self) -> None: 47 | # noinspection HttpUrlsUsage 48 | url = self._create_connection_url() 49 | try: 50 | with urlopen(url) as r: 51 | assert b"" in r.read() 52 | except HTTPError as e: 53 | # 404 is expected, as the server may not have the specific endpoint we are looking for 54 | if e.code == 404: 55 | pass 56 | else: 57 | raise 58 | 59 | def get_api_url(self) -> str: 60 | raise NotImplementedError 61 | 62 | def _create_connection_url(self) -> str: 63 | if self._container is None: 64 | raise ContainerStartException("container has not been started") 65 | host = self.get_container_host_ip() 66 | exposed_port = self.get_exposed_port(self.internal_port) 67 | url = f"http://{host}:{exposed_port}" 68 | return url 69 | 70 | def start(self) -> "ServerContainer": 71 | super().start() 72 | self._connect() 73 | return self 74 | 75 | def stop(self, force=True, delete_volume=True) -> None: 76 | super().stop(force, delete_volume) 77 | 78 | def get_client(self) -> httpx.Client: 79 | return httpx.Client(base_url=self.get_api_url()) 80 | 81 | def get_stdout(self) -> str: 82 | return self.get_logs()[0].decode("utf-8") 83 | -------------------------------------------------------------------------------- /modules/generic/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from typing import Callable 3 | from testcontainers.core.container import DockerClient 4 | 5 | 6 | @pytest.fixture 7 | def check_for_image() -> Callable[[str, bool], None]: 8 | """Warp the check_for_image function in a fixture""" 9 | 10 | def _check_for_image(image_short_id: str, cleaned: bool) -> None: 11 | """ 12 | Validates if the image is present or not. 13 | 14 | :param image_short_id: The short id of the image 15 | :param cleaned: True if the image should not be present, False otherwise 16 | """ 17 | client = DockerClient() 18 | images = client.client.images.list() 19 | found = any(image.short_id.endswith(image_short_id) for image in images) 20 | assert found is not cleaned, f"Image {image_short_id} was {'found' if cleaned else 'not found'}" 21 | 22 | return _check_for_image 23 | -------------------------------------------------------------------------------- /modules/generic/tests/samples/advance_1/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9 2 | 3 | WORKDIR /app 4 | 5 | RUN pip install fastapi[standard] redis 6 | 7 | COPY ./app /app 8 | 9 | EXPOSE 80 10 | 11 | CMD ["fastapi", "run", "main.py", "--port", "80"] 12 | -------------------------------------------------------------------------------- /modules/generic/tests/samples/advance_1/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testcontainers/testcontainers-python/5f34ad0e934a83b49c14b5b0d63284448eac1940/modules/generic/tests/samples/advance_1/app/__init__.py -------------------------------------------------------------------------------- /modules/generic/tests/samples/advance_1/app/main.py: -------------------------------------------------------------------------------- 1 | # This app will use redis to store given key-value pairs. 
2 | 3 | import os 4 | import redis 5 | 6 | from fastapi import FastAPI 7 | 8 | 9 | app = FastAPI() 10 | 11 | redis_host = os.getenv("REDIS_HOST") 12 | redis_port = os.getenv("REDIS_PORT") 13 | redis_client = redis.Redis(host=redis_host, port=redis_port) 14 | redis_client.ping() 15 | 16 | 17 | @app.get("/") 18 | def health_check(): 19 | return {"status": "ok"} 20 | 21 | 22 | @app.get("/get/{key}") 23 | def read_item(key: str): 24 | return {key: redis_client.get(key)} 25 | 26 | 27 | @app.post("/set") 28 | def create_item(key: str, value: str): 29 | redis_client.set(key, value) 30 | return {key: value} 31 | -------------------------------------------------------------------------------- /modules/generic/tests/samples/fastapi/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9 2 | 3 | WORKDIR /app 4 | 5 | RUN pip install fastapi[standard] 6 | 7 | COPY ./app /app 8 | 9 | EXPOSE 80 10 | 11 | CMD ["fastapi", "run", "main.py", "--port", "80"] 12 | -------------------------------------------------------------------------------- /modules/generic/tests/samples/fastapi/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testcontainers/testcontainers-python/5f34ad0e934a83b49c14b5b0d63284448eac1940/modules/generic/tests/samples/fastapi/app/__init__.py -------------------------------------------------------------------------------- /modules/generic/tests/samples/fastapi/app/main.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | 3 | app = FastAPI() 4 | 5 | 6 | @app.get("/api/v1/") 7 | def read_root(): 8 | return {"Status": "Working"} 9 | -------------------------------------------------------------------------------- /modules/generic/tests/samples/python_server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3-alpine 2 | EXPOSE 9000 3 | CMD ["python", "-m", "http.server", "9000"] 4 | -------------------------------------------------------------------------------- /modules/generic/tests/test_generic.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | from typing import Optional 4 | 5 | import pytest 6 | from httpx import get 7 | 8 | from testcontainers.core.waiting_utils import wait_for_logs 9 | from testcontainers.core.image import DockerImage 10 | from testcontainers.generic import ServerContainer 11 | 12 | TEST_DIR = Path(__file__).parent 13 | 14 | 15 | @pytest.mark.parametrize("test_image_cleanup", [True, False]) 16 | @pytest.mark.parametrize("test_image_tag", [None, "custom-image:test"]) 17 | def test_server_container(test_image_tag: Optional[str], test_image_cleanup: bool, check_for_image, port=9000): 18 | with ( 19 | DockerImage( 20 | path=TEST_DIR / "samples/python_server", 21 | tag=test_image_tag, 22 | clean_up=test_image_cleanup, 23 | # 24 | ) as docker_image, 25 | ServerContainer(port=port, image=docker_image) as srv, 26 | ): 27 | image_short_id = docker_image.short_id 28 | image_build_logs = docker_image.get_logs() 29 | # check if dict is in any of the logs 30 | assert {"stream": f"Step 2/3 : EXPOSE {port}"} in image_build_logs, "Image logs mismatch" 31 | assert (port, None) in srv.ports.items(), "Port mismatch" 32 | with pytest.raises(NotImplementedError): 33 | srv.get_api_url() 34 | test_url = srv._create_connection_url() 35 | assert 
re.match(r"http://localhost:\d+", test_url), "Connection URL mismatch" 36 | 37 | check_for_image(image_short_id, test_image_cleanup) 38 | 39 | 40 | def test_server_container_no_port(): 41 | with pytest.raises(TypeError): 42 | with ServerContainer(path="./modules/generic/tests/samples/python_server", tag="test-srv:latest"): 43 | pass 44 | 45 | 46 | def test_like_doctest(): 47 | with DockerImage(path=TEST_DIR / "samples/python_server", tag="test-srv:latest") as image: 48 | with ServerContainer(port=9000, image=image) as srv: 49 | url = srv._create_connection_url() 50 | response = get(f"{url}", timeout=5) 51 | assert response.status_code == 200, "Response status code is not 200" 52 | delay = wait_for_logs(srv, "GET / HTTP/1.1") 53 | print(delay) 54 | -------------------------------------------------------------------------------- /modules/google/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.google.DatastoreContainer 2 | .. title:: testcontainers.google.DatastoreContainer 3 | .. autoclass:: testcontainers.google.PubSubContainer 4 | .. title:: testcontainers.google.PubSubContainer 5 | -------------------------------------------------------------------------------- /modules/google/testcontainers/google/__init__.py: -------------------------------------------------------------------------------- 1 | from .datastore import DatastoreContainer # noqa: F401 2 | from .pubsub import PubSubContainer # noqa: F401 3 | -------------------------------------------------------------------------------- /modules/google/testcontainers/google/datastore.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | import os 14 | from unittest.mock import patch 15 | 16 | from google.cloud import datastore 17 | from testcontainers.core.container import DockerContainer 18 | from testcontainers.core.waiting_utils import wait_for_logs 19 | 20 | 21 | class DatastoreContainer(DockerContainer): 22 | """ 23 | Datastore container for testing managed message queues. 24 | 25 | Example: 26 | 27 | The example will spin up a Google Cloud Datastore emulator that you can use for integration 28 | tests. The :code:`datastore` instance provides convenience methods :code:`get_datastore_client` to 29 | connect to the emulator without having to set the environment variable :code:`DATASTORE_EMULATOR_HOST`. 30 | 31 | .. doctest:: 32 | 33 | >>> from testcontainers.google import DatastoreContainer 34 | 35 | >>> config = DatastoreContainer() 36 | >>> with config as datastore: 37 | ... 
datastore_client = datastore.get_datastore_client() 38 | """ 39 | 40 | def __init__( 41 | self, 42 | image: str = "google/cloud-sdk:emulators", 43 | project: str = "test-project", 44 | port: int = 8081, 45 | **kwargs, 46 | ) -> None: 47 | super().__init__(image=image, **kwargs) 48 | self.project = project 49 | self.port = port 50 | self.with_exposed_ports(self.port) 51 | self.with_command( 52 | f"gcloud beta emulators datastore start --no-store-on-disk --project={project} --host-port=0.0.0.0:{port}" 53 | ) 54 | 55 | def get_datastore_emulator_host(self) -> str: 56 | return f"{self.get_container_host_ip()}:{self.get_exposed_port(self.port)}" 57 | 58 | def get_datastore_client(self, **kwargs) -> datastore.Client: 59 | wait_for_logs(self, "Dev App Server is now running.", timeout=30.0) 60 | env_vars = { 61 | "DATASTORE_DATASET": self.project, 62 | "DATASTORE_EMULATOR_HOST": self.get_datastore_emulator_host(), 63 | "DATASTORE_EMULATOR_HOST_PATH": f"{self.get_datastore_emulator_host()}/datastore", 64 | "DATASTORE_HOST": f"http://{self.get_datastore_emulator_host()}", 65 | "DATASTORE_PROJECT_ID": self.project, 66 | } 67 | with patch.dict(os.environ, env_vars): 68 | return datastore.Client(**kwargs) 69 | -------------------------------------------------------------------------------- /modules/google/testcontainers/google/pubsub.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | import os 14 | from unittest.mock import patch 15 | 16 | from google.cloud import pubsub 17 | from testcontainers.core.container import DockerContainer 18 | 19 | 20 | class PubSubContainer(DockerContainer): 21 | """ 22 | PubSub container for testing managed message queues. 23 | 24 | Example: 25 | 26 | The example will spin up a Google Cloud PubSub emulator that you can use for integration 27 | tests. The :code:`pubsub` instance provides convenience methods :code:`get_publisher` and 28 | :code:`get_subscriber` to connect to the emulator without having to set the environment 29 | variable :code:`PUBSUB_EMULATOR_HOST`. 30 | 31 | .. doctest:: 32 | 33 | >>> from testcontainers.google import PubSubContainer 34 | 35 | >>> config = PubSubContainer() 36 | >>> with config as pubsub: 37 | ... publisher = pubsub.get_publisher_client() 38 | ... topic_path = publisher.topic_path(pubsub.project, "my-topic") 39 | ... 
topic = publisher.create_topic(name=topic_path) 40 | """ 41 | 42 | def __init__( 43 | self, image: str = "google/cloud-sdk:emulators", project: str = "test-project", port: int = 8432, **kwargs 44 | ) -> None: 45 | super().__init__(image=image, **kwargs) 46 | self.project = project 47 | self.port = port 48 | self.with_exposed_ports(self.port) 49 | self.with_command(f"gcloud beta emulators pubsub start --project={project} --host-port=0.0.0.0:{port}") 50 | 51 | def get_pubsub_emulator_host(self) -> str: 52 | return f"{self.get_container_host_ip()}:{self.get_exposed_port(self.port)}" 53 | 54 | def _get_client(self, cls: type, **kwargs) -> dict: 55 | with patch.dict(os.environ, PUBSUB_EMULATOR_HOST=self.get_pubsub_emulator_host()): 56 | return cls(**kwargs) 57 | 58 | def get_publisher_client(self, **kwargs) -> pubsub.PublisherClient: 59 | from google.auth import credentials 60 | 61 | kwargs["client_options"] = {"api_endpoint": self.get_pubsub_emulator_host()} 62 | kwargs["credentials"] = credentials.AnonymousCredentials() 63 | return self._get_client(pubsub.PublisherClient, **kwargs) 64 | 65 | def get_subscriber_client(self, **kwargs) -> pubsub.SubscriberClient: 66 | from google.auth import credentials 67 | 68 | kwargs["client_options"] = {"api_endpoint": self.get_pubsub_emulator_host()} 69 | kwargs["credentials"] = credentials.AnonymousCredentials() 70 | return self._get_client(pubsub.SubscriberClient, **kwargs) 71 | -------------------------------------------------------------------------------- /modules/google/tests/test_google.py: -------------------------------------------------------------------------------- 1 | from queue import Queue 2 | from google.cloud.datastore import Entity 3 | 4 | from testcontainers.core.waiting_utils import wait_for_logs 5 | from testcontainers.google import PubSubContainer, DatastoreContainer 6 | 7 | 8 | def test_pubsub_container(): 9 | pubsub: PubSubContainer 10 | with PubSubContainer() as pubsub: 11 | wait_for_logs(pubsub, r"Server started, listening on \d+", timeout=60) 12 | # Create a new topic 13 | publisher = pubsub.get_publisher_client() 14 | topic_path = publisher.topic_path(pubsub.project, "my-topic") 15 | publisher.create_topic(name=topic_path) 16 | 17 | # Create a subscription 18 | subscriber = pubsub.get_subscriber_client() 19 | subscription_path = subscriber.subscription_path(pubsub.project, "my-subscription") 20 | subscriber.create_subscription(name=subscription_path, topic=topic_path) 21 | 22 | # Publish a message 23 | publisher.publish(topic_path, b"Hello world!") 24 | 25 | # Receive the message 26 | queue = Queue() 27 | subscriber.subscribe(subscription_path, queue.put) 28 | message = queue.get(timeout=1) 29 | assert message.data == b"Hello world!" 30 | message.ack() 31 | 32 | 33 | def test_datastore_container_creation(): 34 | # Initialize the Datastore emulator container 35 | with DatastoreContainer() as datastore: 36 | # Obtain a datastore client configured to connect to the emulator 37 | client = datastore.get_datastore_client() 38 | 39 | # Define a unique key for a test entity to ensure test isolation 40 | key = client.key("TestKind", "test_id_1") 41 | 42 | # Create and insert a new entity 43 | entity = Entity(key=key) 44 | entity.update({"foo": "bar"}) 45 | client.put(entity) 46 | 47 | # Fetch the just-inserted entity directly 48 | fetched_entity = client.get(key) 49 | 50 | # Assert that the fetched entity matches what was inserted 51 | assert fetched_entity is not None, "Entity was not found in the datastore." 
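        # (The lookup above goes to the emulator rather than a real Cloud project, because
        # get_datastore_client() patches DATASTORE_EMULATOR_HOST and related variables when
        # the client is created.)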
52 | assert fetched_entity["foo"] == "bar", "Entity attribute 'foo' did not match expected value 'bar'." 53 | 54 | 55 | def test_datastore_container_isolation(): 56 | # Initialize the Datastore emulator container 57 | with DatastoreContainer() as datastore: 58 | # Obtain a datastore client configured to connect to the emulator 59 | client = datastore.get_datastore_client() 60 | 61 | # Define a unique key for a test entity to ensure test isolation 62 | key = client.key("TestKind", "test_id_1") 63 | 64 | # Create and insert a new entity 65 | entity = Entity(key=key) 66 | entity.update({"foo": "bar"}) 67 | client.put(entity) 68 | 69 | # Create a second container and try to fetch the entity to make sure it is a different container 70 | with DatastoreContainer() as datastore2: 71 | assert datastore.get_datastore_emulator_host() != datastore2.get_datastore_emulator_host(), ( 72 | "Datastore containers use the same port." 73 | ) 74 | client2 = datastore2.get_datastore_client() 75 | fetched_entity2 = client2.get(key) 76 | assert fetched_entity2 is None, "Entity was found in the datastore." 77 | -------------------------------------------------------------------------------- /modules/index.rst: -------------------------------------------------------------------------------- 1 | Community Modules 2 | ================= 3 | 4 | .. 5 | glob: 6 | https://stackoverflow.com/a/44572883/4971476 7 | 8 | .. toctree:: 9 | :glob: 10 | 11 | */README 12 | -------------------------------------------------------------------------------- /modules/influxdb/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.influxdb.InfluxDbContainer 2 | .. title:: testcontainers.influxdb.InfluxDbContainer 3 | -------------------------------------------------------------------------------- /modules/influxdb/testcontainers/influxdb1/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | 14 | from typing import Optional 15 | 16 | from influxdb import InfluxDBClient 17 | 18 | from testcontainers.influxdb import InfluxDbContainer 19 | 20 | 21 | class InfluxDb1Container(InfluxDbContainer): 22 | """ 23 | Docker container for InfluxDB 1.x. 24 | Official Docker images for InfluxDB are hosted at https://hub.docker.com/_/influxdb/. 25 | 26 | Example: 27 | 28 | .. doctest:: 29 | 30 | >>> from testcontainers.influxdb1 import InfluxDb1Container 31 | 32 | >>> with InfluxDb1Container() as influxdb: 33 | ...
version = influxdb.get_version() 34 | """ 35 | 36 | def __init__( 37 | self, 38 | image: str = "influxdb:1.8", 39 | # in the container, the default port for influxdb is often 8086 and not likely to change 40 | container_port: int = 8086, 41 | # specifies the port on the host machine where influxdb is exposed; a random available port otherwise 42 | host_port: Optional[int] = None, 43 | **docker_client_kw, 44 | ): 45 | super().__init__(image, container_port, host_port, **docker_client_kw) 46 | 47 | def get_client(self, **client_kwargs): 48 | """ 49 | Returns an instance of the influxdb client, for InfluxDB 1.x versions. 50 | Note that this client is not maintained anymore, but it is the only 51 | official client available for 1.x InfluxDB versions: 52 | - https://github.com/influxdata/influxdb-python 53 | - https://pypi.org/project/influxdb/ 54 | 55 | To some extent, you can use the v2 client with InfluxDB v1.8+: 56 | - https://github.com/influxdata/influxdb-client-python#influxdb-18-api-compatibility 57 | """ 58 | 59 | return InfluxDBClient(self.get_container_host_ip(), self.get_exposed_port(self.container_port), **client_kwargs) 60 | 61 | def start(self) -> "InfluxDb1Container": 62 | """ 63 | Overridden for better typing reason 64 | """ 65 | return super().start() 66 | -------------------------------------------------------------------------------- /modules/influxdb/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testcontainers/testcontainers-python/5f34ad0e934a83b49c14b5b0d63284448eac1940/modules/influxdb/tests/__init__.py -------------------------------------------------------------------------------- /modules/k3s/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.k3s.K3SContainer 2 | .. title:: testcontainers.k3s.K3SContainer 3 | -------------------------------------------------------------------------------- /modules/k3s/testcontainers/k3s/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | 14 | import logging 15 | 16 | from testcontainers.core.config import testcontainers_config 17 | from testcontainers.core.container import DockerContainer 18 | from testcontainers.core.waiting_utils import wait_for_logs 19 | 20 | 21 | class K3SContainer(DockerContainer): 22 | """ 23 | K3S container. 24 | 25 | Example: 26 | 27 | .. doctest:: 28 | 29 | >>> import yaml 30 | >>> from testcontainers.k3s import K3SContainer 31 | >>> from kubernetes import client, config 32 | 33 | >>> with K3SContainer() as k3s: 34 | ... config.load_kube_config_from_dict(yaml.safe_load(k3s.config_yaml())) 35 | ... pod = client.CoreV1Api().list_pod_for_all_namespaces(limit=1) 36 | ... 
assert len(pod.items) > 0, "Unable to get running nodes from k3s cluster" 37 | """ 38 | 39 | KUBE_SECURE_PORT = 6443 40 | RANCHER_WEBHOOK_PORT = 8443 41 | 42 | def __init__(self, image="rancher/k3s:latest", enable_cgroup_mount=True, **kwargs) -> None: 43 | super().__init__(image, **kwargs) 44 | self.with_exposed_ports(self.KUBE_SECURE_PORT, self.RANCHER_WEBHOOK_PORT) 45 | self.with_env("K3S_URL", f"https://{self.get_container_host_ip()}:{self.KUBE_SECURE_PORT}") 46 | self.with_command("server --disable traefik --tls-san=" + self.get_container_host_ip()) 47 | self.with_kwargs(privileged=True, tmpfs={"/run": "", "/var/run": ""}) 48 | if enable_cgroup_mount: 49 | self.with_volume_mapping("/sys/fs/cgroup", "/sys/fs/cgroup", "rw") 50 | else: 51 | logging.warning("'enable_cgroup_mount' is experimental, see testcontainers/testcontainers-python#591)") 52 | 53 | def _connect(self) -> None: 54 | wait_for_logs(self, predicate="Node controller sync successful", timeout=testcontainers_config.timeout) 55 | 56 | def start(self) -> "K3SContainer": 57 | super().start() 58 | self._connect() 59 | return self 60 | 61 | def config_yaml(self) -> str: 62 | """This function returns the kubernetes config yaml which can be used 63 | to initialise k8s client 64 | """ 65 | execution = self.get_wrapped_container().exec_run(["cat", "/etc/rancher/k3s/k3s.yaml"]) 66 | config_yaml = execution.output.decode("utf-8").replace( 67 | f"https://127.0.0.1:{self.KUBE_SECURE_PORT}", 68 | f"https://{self.get_container_host_ip()}:{self.get_exposed_port(self.KUBE_SECURE_PORT)}", 69 | ) 70 | return config_yaml 71 | -------------------------------------------------------------------------------- /modules/k3s/tests/test_k3s.py: -------------------------------------------------------------------------------- 1 | # The versions below were the current supported versions at time of writing (2022-08-11) 2 | import yaml 3 | from kubernetes import client, config 4 | 5 | from testcontainers.k3s import K3SContainer 6 | 7 | 8 | def test_docker_run_k3s(): 9 | with K3SContainer() as k3s: 10 | config.load_kube_config_from_dict(yaml.safe_load(k3s.config_yaml())) 11 | pod = client.CoreV1Api().list_pod_for_all_namespaces(limit=1) 12 | assert len(pod.items) > 0, "Unable to get running nodes from k3s cluster" 13 | -------------------------------------------------------------------------------- /modules/kafka/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.kafka.KafkaContainer 2 | .. title:: testcontainers.kafka.KafkaContainer 3 | .. autoclass:: testcontainers.kafka.RedpandaContainer 4 | -------------------------------------------------------------------------------- /modules/kafka/testcontainers/kafka/_redpanda.py: -------------------------------------------------------------------------------- 1 | import tarfile 2 | import time 3 | from io import BytesIO 4 | from textwrap import dedent 5 | 6 | from testcontainers.core.container import DockerContainer 7 | from testcontainers.core.waiting_utils import wait_for_logs 8 | 9 | 10 | class RedpandaContainer(DockerContainer): 11 | """ 12 | Redpanda container. 13 | 14 | Example: 15 | 16 | .. doctest:: 17 | 18 | >>> from testcontainers.kafka import RedpandaContainer 19 | 20 | >>> with RedpandaContainer() as redpanda: 21 | ... 
connection = redpanda.get_bootstrap_server() 22 | """ 23 | 24 | TC_START_SCRIPT = "/tc-start.sh" 25 | 26 | def __init__( 27 | self, 28 | image: str = "docker.redpanda.com/redpandadata/redpanda:v23.1.13", 29 | **kwargs, 30 | ) -> None: 31 | kwargs["entrypoint"] = "sh" 32 | super().__init__(image, **kwargs) 33 | self.redpanda_port = 9092 34 | self.schema_registry_port = 8081 35 | self.with_exposed_ports(self.redpanda_port, self.schema_registry_port) 36 | 37 | def get_bootstrap_server(self) -> str: 38 | host = self.get_container_host_ip() 39 | port = self.get_exposed_port(self.redpanda_port) 40 | return f"{host}:{port}" 41 | 42 | def get_schema_registry_address(self) -> str: 43 | host = self.get_container_host_ip() 44 | port = self.get_exposed_port(self.schema_registry_port) 45 | return f"http://{host}:{port}" 46 | 47 | def tc_start(self) -> None: 48 | host = self.get_container_host_ip() 49 | port = self.get_exposed_port(self.redpanda_port) 50 | 51 | data = ( 52 | dedent( 53 | f""" 54 | #!/bin/bash 55 | /usr/bin/rpk redpanda start --mode dev-container --smp 1 --memory 1G \ 56 | --kafka-addr PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092 \ 57 | --advertise-kafka-addr PLAINTEXT://127.0.0.1:29092,OUTSIDE://{host}:{port} 58 | """ 59 | ) 60 | .strip() 61 | .encode("utf-8") 62 | ) 63 | 64 | self.create_file(data, RedpandaContainer.TC_START_SCRIPT) 65 | 66 | def start(self, timeout=10) -> "RedpandaContainer": 67 | script = RedpandaContainer.TC_START_SCRIPT 68 | command = f'-c "while [ ! -f {script} ]; do sleep 0.1; done; sh {script}"' 69 | self.with_command(command) 70 | super().start() 71 | self.tc_start() 72 | wait_for_logs(self, r".*Started Kafka API server.*", timeout=timeout) 73 | return self 74 | 75 | def create_file(self, content: bytes, path: str) -> None: 76 | with BytesIO() as archive, tarfile.TarFile(fileobj=archive, mode="w") as tar: 77 | tarinfo = tarfile.TarInfo(name=path) 78 | tarinfo.size = len(content) 79 | tarinfo.mtime = time.time() 80 | tar.addfile(tarinfo, BytesIO(content)) 81 | archive.seek(0) 82 | self.get_wrapped_container().put_archive("/", archive) 83 | -------------------------------------------------------------------------------- /modules/kafka/tests/test_kafka.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from kafka import KafkaAdminClient, KafkaConsumer, KafkaProducer, TopicPartition 3 | 4 | from testcontainers.core.network import Network 5 | from testcontainers.kafka import KafkaContainer, kafka_config 6 | 7 | 8 | def test_kafka_producer_consumer(): 9 | with KafkaContainer() as container: 10 | produce_and_consume_kafka_message(container) 11 | 12 | 13 | def test_kafka_with_kraft_producer_consumer(): 14 | with KafkaContainer().with_kraft() as container: 15 | assert container.kraft_enabled 16 | produce_and_consume_kafka_message(container) 17 | 18 | 19 | def test_kafka_producer_consumer_custom_port(): 20 | with KafkaContainer(port=9888) as container: 21 | assert container.port == 9888 22 | produce_and_consume_kafka_message(container) 23 | 24 | 25 | def test_kafka_on_networks(monkeypatch: pytest.MonkeyPatch): 26 | """ 27 | this test case comes from testcontainers/testcontainers-python#637 28 | """ 29 | monkeypatch.setattr(kafka_config, "limit_broker_to_first_host", True) 30 | 31 | with Network() as network: 32 | kafka_ctr = KafkaContainer() 33 | kafka_ctr.with_network(network) 34 | kafka_ctr.with_network_aliases("kafka") 35 | 36 | with kafka_ctr: 37 | print("started") # Will not reach here and timeout 38 | 
admin_client = KafkaAdminClient(bootstrap_servers=[kafka_ctr.get_bootstrap_server()]) 39 | print(admin_client.describe_cluster()) 40 | 41 | 42 | def produce_and_consume_kafka_message(container): 43 | topic = "test-topic" 44 | bootstrap_server = container.get_bootstrap_server() 45 | 46 | producer = KafkaProducer(bootstrap_servers=[bootstrap_server]) 47 | producer.send(topic, b"verification message") 48 | producer.flush() 49 | producer.close() 50 | 51 | consumer = KafkaConsumer(bootstrap_servers=[bootstrap_server]) 52 | tp = TopicPartition(topic, 0) 53 | consumer.assign([tp]) 54 | consumer.seek_to_beginning() 55 | assert consumer.end_offsets([tp])[tp] == 1, "Expected exactly one test message to be present on test topic !" 56 | -------------------------------------------------------------------------------- /modules/kafka/tests/test_redpanda.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from requests import post, get 3 | from json import dumps 4 | 5 | from kafka import KafkaConsumer, KafkaProducer, TopicPartition, KafkaAdminClient 6 | from kafka.admin import NewTopic 7 | 8 | from testcontainers.kafka import RedpandaContainer 9 | 10 | 11 | def test_redpanda_producer_consumer(): 12 | with RedpandaContainer() as container: 13 | produce_and_consume_message(container) 14 | 15 | 16 | @pytest.mark.parametrize("version", ["v23.1.13", "v23.3.10"]) 17 | def test_redpanda_confluent_version(version): 18 | with RedpandaContainer(image=f"docker.redpanda.com/redpandadata/redpanda:{version}") as container: 19 | produce_and_consume_message(container) 20 | 21 | 22 | def test_schema_registry(): 23 | with RedpandaContainer() as container: 24 | address = container.get_schema_registry_address() 25 | subject_name = "test-subject-value" 26 | url = f"{address}/subjects" 27 | 28 | payload = {"schema": dumps({"type": "string"})} 29 | headers = {"Content-Type": "application/vnd.schemaregistry.v1+json"} 30 | create_result = post(f"{url}/{subject_name}/versions", data=dumps(payload), headers=headers) 31 | assert create_result.status_code == 200 32 | 33 | result = get(url) 34 | assert result.status_code == 200 35 | assert subject_name in result.json() 36 | 37 | 38 | def produce_and_consume_message(container): 39 | topic = "test-topic" 40 | bootstrap_server = container.get_bootstrap_server() 41 | 42 | admin = KafkaAdminClient(bootstrap_servers=[bootstrap_server]) 43 | admin.create_topics([NewTopic(topic, 1, 1)]) 44 | 45 | producer = KafkaProducer(bootstrap_servers=[bootstrap_server]) 46 | future = producer.send(topic, b"verification message") 47 | future.get(timeout=10) 48 | producer.close() 49 | 50 | consumer = KafkaConsumer(bootstrap_servers=[bootstrap_server]) 51 | tp = TopicPartition(topic, 0) 52 | consumer.assign([tp]) 53 | consumer.seek_to_beginning() 54 | assert consumer.end_offsets([tp])[tp] == 1, "Expected exactly one test message to be present on test topic !" 55 | -------------------------------------------------------------------------------- /modules/keycloak/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.keycloak.KeycloakContainer 2 | .. 
title:: testcontainers.keycloak.KeycloakContainer 3 | -------------------------------------------------------------------------------- /modules/keycloak/tests/test_keycloak.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testcontainers.keycloak import KeycloakContainer 3 | 4 | 5 | @pytest.mark.parametrize("image_version", ["26.0.0", "25.0", "24.0.1", "18.0"]) 6 | def test_docker_run_keycloak(image_version: str): 7 | with KeycloakContainer(f"quay.io/keycloak/keycloak:{image_version}") as keycloak_admin: 8 | assert keycloak_admin.get_client().users_count() == 1 9 | -------------------------------------------------------------------------------- /modules/localstack/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.localstack.LocalStackContainer 2 | .. title:: testcontainers.localstack.LocalStackContainer 3 | -------------------------------------------------------------------------------- /modules/localstack/tests/test_localstack.py: -------------------------------------------------------------------------------- 1 | import json 2 | import urllib 3 | 4 | from testcontainers.localstack import LocalStackContainer 5 | 6 | 7 | def test_docker_run_localstack(): 8 | with LocalStackContainer() as localstack: 9 | resp = urllib.request.urlopen(f"{localstack.get_url()}/health") 10 | services = json.loads(resp.read().decode())["services"] 11 | 12 | # Check that all services are running 13 | assert all(value == "available" for value in services.values()) 14 | # Check that some of the services keys 15 | assert all(test_service in services for test_service in ["dynamodb", "sns", "sqs"]) 16 | 17 | 18 | def test_localstack_boto3(): 19 | from testcontainers.localstack import LocalStackContainer 20 | 21 | with LocalStackContainer(image="localstack/localstack:2.0.1") as localstack: 22 | dynamo_client = localstack.get_client("dynamodb") 23 | tables = dynamo_client.list_tables() 24 | assert tables["TableNames"] == [] 25 | -------------------------------------------------------------------------------- /modules/mailpit/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.mailpit.MailpitUser 2 | .. autoclass:: testcontainers.mailpit.MailpitContainer 3 | .. title:: testcontainers.mailpit.MailpitContainer 4 | -------------------------------------------------------------------------------- /modules/mailpit/testcontainers/mailpit/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testcontainers/testcontainers-python/5f34ad0e934a83b49c14b5b0d63284448eac1940/modules/mailpit/testcontainers/mailpit/py.typed -------------------------------------------------------------------------------- /modules/memcached/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.memcached.MemcachedContainer 2 | -------------------------------------------------------------------------------- /modules/memcached/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_namespace_packages, setup 2 | 3 | description = "Memcached component of testcontainers-python." 
4 | 5 | setup( 6 | name="testcontainers-memcached", 7 | version="0.0.1rc1", 8 | packages=find_namespace_packages(), 9 | description=description, 10 | long_description=description, 11 | long_description_content_type="text/x-rst", 12 | url="https://github.com/testcontainers/testcontainers-python", 13 | install_requires=[ 14 | "testcontainers-core", 15 | ], 16 | python_requires=">=3.7", 17 | ) 18 | -------------------------------------------------------------------------------- /modules/memcached/testcontainers/memcached/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | import socket 14 | 15 | from testcontainers.core.container import DockerContainer 16 | from testcontainers.core.waiting_utils import wait_container_is_ready 17 | 18 | 19 | class MemcachedNotReady(Exception): 20 | pass 21 | 22 | 23 | class MemcachedContainer(DockerContainer): 24 | """ 25 | Test container for Memcached. The example below spins up a Memcached server 26 | 27 | Example: 28 | 29 | .. doctest:: 30 | 31 | >>> from testcontainers.memcached import MemcachedContainer 32 | 33 | >>> with MemcachedContainer() as memcached_container: 34 | ... host, port = memcached_container.get_host_and_port() 35 | """ 36 | 37 | def __init__(self, image="memcached:1", port_to_expose=11211, **kwargs): 38 | super().__init__(image, **kwargs) 39 | self.port_to_expose = port_to_expose 40 | self.with_exposed_ports(port_to_expose) 41 | 42 | @wait_container_is_ready(MemcachedNotReady) 43 | def _connect(self): 44 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 45 | host = self.get_container_host_ip() 46 | port = int(self.get_exposed_port(self.port_to_expose)) 47 | s.connect((host, port)) 48 | s.sendall(b"stats\n\r") 49 | data = s.recv(1024) 50 | if len(data) == 0: 51 | raise MemcachedNotReady("Memcached not ready yet") 52 | 53 | def start(self): 54 | super().start() 55 | self._connect() 56 | return self 57 | 58 | def get_host_and_port(self): 59 | return self.get_container_host_ip(), int(self.get_exposed_port(self.port_to_expose)) 60 | -------------------------------------------------------------------------------- /modules/memcached/tests/test_memcached.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from testcontainers.memcached import MemcachedContainer 4 | 5 | import pytest 6 | 7 | 8 | def test_memcached_host_and_exposed_port(): 9 | with MemcachedContainer("memcached:1.6-alpine") as memcached: 10 | host, port = memcached.get_host_and_port() 11 | assert host == "localhost" 12 | assert port != 11211 13 | 14 | 15 | @pytest.mark.parametrize("image", ["memcached:1.6-bookworm", "memcached:1.6-alpine"]) 16 | def test_memcached_can_connect_and_retrieve_data(image): 17 | with MemcachedContainer(image) as memcached: 18 | host, port = memcached.get_host_and_port() 19 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 20 | s.connect((host, 
port)) 21 | s.sendall(b"stats\n\r") 22 | data = s.recv(1024) 23 | assert len(data) > 0, "We should have received some data from memcached" 24 | 25 | pid_stat, uptime_stat, *_ = data.decode().split("\r\n") 26 | 27 | assert pid_stat.startswith("STAT pid") 28 | assert uptime_stat.startswith("STAT uptime") 29 | -------------------------------------------------------------------------------- /modules/milvus/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.milvus.MilvusContainer 2 | .. title:: testcontainers.milvus.MilvusContainer 3 | -------------------------------------------------------------------------------- /modules/milvus/tests/test_milvus.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pymilvus import MilvusClient 3 | 4 | from testcontainers.milvus import MilvusContainer 5 | 6 | VERSIONS = ["v2.4.0", "v2.4.4"] 7 | 8 | 9 | class ClientMilvusContainer(MilvusContainer): 10 | def get_client(self, *, dbname: str = "default", token: str = "root:Milvus") -> MilvusClient: 11 | connection_url = self.get_connection_url() 12 | client = MilvusClient(uri=connection_url, dbname=dbname, token=token) 13 | return client 14 | 15 | 16 | @pytest.mark.parametrize("version", VERSIONS) 17 | def test_run_milvus_success(version: str): 18 | image = f"milvusdb/milvus:{version}" 19 | 20 | with MilvusContainer(image=image) as milvus_container: 21 | exposed_port = milvus_container.get_exposed_port(milvus_container.port) 22 | url = milvus_container.get_connection_url() 23 | 24 | assert url and exposed_port in url 25 | 26 | 27 | @pytest.mark.parametrize("version", VERSIONS) 28 | def test_milvus_client_success(version: str): 29 | image = f"milvusdb/milvus:{version}" 30 | test_collection = "test_collection" 31 | 32 | with ClientMilvusContainer(image=image) as milvus_container: 33 | client = milvus_container.get_client() 34 | client.create_collection(test_collection, dimension=2) 35 | collections = client.list_collections() 36 | assert test_collection in collections 37 | 38 | client.drop_collection(test_collection) 39 | assert not client.has_collection(test_collection) 40 | -------------------------------------------------------------------------------- /modules/minio/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.minio.MinioContainer 2 | .. 
title:: testcontainers.minio.MinioContainer 3 | -------------------------------------------------------------------------------- /modules/minio/tests/test_minio.py: -------------------------------------------------------------------------------- 1 | import io 2 | 3 | from testcontainers.minio import MinioContainer 4 | 5 | 6 | def test_docker_run_minio(): 7 | config = MinioContainer(access_key="test-access", secret_key="test-secret") 8 | with config as minio: 9 | client = minio.get_client() 10 | client.make_bucket("test") 11 | test_content = b"Hello World" 12 | client.put_object( 13 | "test", 14 | "testfile.txt", 15 | io.BytesIO(test_content), 16 | length=len(test_content), 17 | ) 18 | 19 | assert client.get_object("test", "testfile.txt").data == test_content 20 | assert minio.get_config()["access_key"] == config.access_key 21 | assert minio.get_config()["secret_key"] == config.secret_key 22 | -------------------------------------------------------------------------------- /modules/mongodb/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.mongodb.MongoDbContainer 2 | .. title:: testcontainers.mongodb.MongoDbContainer 3 | -------------------------------------------------------------------------------- /modules/mongodb/testcontainers/mongodb/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | import os 14 | from typing import Optional 15 | 16 | from pymongo import MongoClient 17 | 18 | from testcontainers.core.generic import DbContainer 19 | from testcontainers.core.utils import raise_for_deprecated_parameter 20 | from testcontainers.core.waiting_utils import wait_for_logs 21 | 22 | 23 | class MongoDbContainer(DbContainer): 24 | """ 25 | Mongo document-based database container. 26 | 27 | Example: 28 | 29 | .. doctest:: 30 | 31 | >>> from testcontainers.mongodb import MongoDbContainer 32 | 33 | >>> with MongoDbContainer("mongo:7.0.7") as mongo: 34 | ... db = mongo.get_connection_client().test 35 | ... # Insert a database entry 36 | ... result = db.restaurants.insert_one( 37 | ... { 38 | ... "name": "Vella", 39 | ... "cuisine": "Italian", 40 | ... "restaurant_id": "123456" 41 | ... } 42 | ... ) 43 | ... # Find the restaurant document 44 | ... result = db.restaurants.find_one({"name": "Vella"}) 45 | ... 
result["restaurant_id"] 46 | '123456' 47 | """ 48 | 49 | def __init__( 50 | self, 51 | image: str = "mongo:latest", 52 | port: int = 27017, 53 | username: Optional[str] = None, 54 | password: Optional[str] = None, 55 | dbname: Optional[str] = None, 56 | **kwargs, 57 | ) -> None: 58 | raise_for_deprecated_parameter(kwargs, "port_to_expose", "port") 59 | super().__init__(image=image, **kwargs) 60 | self.username = username if username else os.environ.get("MONGO_INITDB_ROOT_USERNAME", "test") 61 | self.password = password if password else os.environ.get("MONGO_INITDB_ROOT_PASSWORD", "test") 62 | self.dbname = dbname if dbname else os.environ.get("MONGO_DB", "test") 63 | self.port = port 64 | self.with_exposed_ports(self.port) 65 | 66 | def _configure(self) -> None: 67 | self.with_env("MONGO_INITDB_ROOT_USERNAME", self.username) 68 | self.with_env("MONGO_INITDB_ROOT_PASSWORD", self.password) 69 | self.with_env("MONGO_DB", self.dbname) 70 | 71 | def get_connection_url(self) -> str: 72 | return self._create_connection_url( 73 | dialect="mongodb", 74 | username=self.username, 75 | password=self.password, 76 | port=self.port, 77 | ) 78 | 79 | def _connect(self) -> None: 80 | wait_for_logs(self, "Waiting for connections") 81 | 82 | def get_connection_client(self) -> MongoClient: 83 | return MongoClient(self.get_connection_url()) 84 | -------------------------------------------------------------------------------- /modules/mongodb/tests/test_mongodb.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pymongo import MongoClient 3 | from pymongo.errors import OperationFailure 4 | 5 | from testcontainers.mongodb import MongoDbContainer 6 | 7 | 8 | @pytest.mark.parametrize("version", ["7.0.7", "6.0.14", "5.0.26"]) 9 | def test_docker_run_mongodb(version: str): 10 | with MongoDbContainer(f"mongo:{version}") as mongo: 11 | db = mongo.get_connection_client().test 12 | doc = { 13 | "address": { 14 | "street": "2 Avenue", 15 | "zipcode": "10075", 16 | "building": "1480", 17 | "coord": [-73.9557413, 40.7720266], 18 | }, 19 | "borough": "Manhattan", 20 | "cuisine": "Italian", 21 | "name": "Vella", 22 | "restaurant_id": "41704620", 23 | } 24 | result = db.restaurants.insert_one(doc) 25 | assert result.inserted_id 26 | 27 | cursor = db.restaurants.find({"borough": "Manhattan"}) 28 | assert cursor.next()["restaurant_id"] == doc["restaurant_id"] 29 | 30 | 31 | # This is a feature in the generic DbContainer class 32 | # but it can't be tested on its own 33 | # so is tested in various database modules: 34 | # - mysql / mariadb 35 | # - postgresql 36 | # - sqlserver 37 | # - mongodb 38 | # - db2 39 | def test_quoted_password(): 40 | user = "root" 41 | password = "p@$%25+0&%rd :/!=?" 42 | quoted_password = "p%40%24%2525+0%26%25rd %3A%2F%21%3D%3F" 43 | # driver = "pymongo" 44 | kwargs = { 45 | "username": user, 46 | "password": password, 47 | } 48 | with MongoDbContainer("mongo:7.0.7", **kwargs) as container: 49 | host = container.get_container_host_ip() 50 | port = container.get_exposed_port(27017) 51 | expected_url = f"mongodb://{user}:{quoted_password}@{host}:{port}" 52 | url = container.get_connection_url() 53 | assert url == expected_url 54 | -------------------------------------------------------------------------------- /modules/mqtt/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.mqtt.MosquittoContainer 2 | .. 
title:: testcontainers.mqtt.MosquittoContainer 3 | -------------------------------------------------------------------------------- /modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf: -------------------------------------------------------------------------------- 1 | # see https://mosquitto.org/man/mosquitto-conf-5.html 2 | 3 | protocol mqtt 4 | user root 5 | log_dest stdout 6 | allow_anonymous true 7 | 8 | log_type error 9 | log_type warning 10 | log_type notice 11 | log_type information 12 | 13 | log_timestamp_format %Y-%m-%d %H:%M:%S 14 | persistence true 15 | persistence_location /data/ 16 | 17 | listener 1883 18 | protocol mqtt 19 | 20 | sys_interval 1 21 | -------------------------------------------------------------------------------- /modules/mqtt/tests/test_mosquitto.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from testcontainers.mqtt import MosquittoContainer 4 | 5 | VERSIONS = ["1.6.15", "2.0.18"] 6 | 7 | 8 | @pytest.mark.parametrize("version", VERSIONS) 9 | def test_mosquitto(version): 10 | with MosquittoContainer(image=f"eclipse-mosquitto:{version}") as container: 11 | external_port = int(container.get_exposed_port(container.MQTT_PORT)) 12 | print(f"listening on port: {external_port}") 13 | 14 | 15 | @pytest.mark.parametrize("version", VERSIONS) 16 | def test_mosquitto_client(version): 17 | with MosquittoContainer(image=f"eclipse-mosquitto:{version}") as container: 18 | container.get_client() 19 | -------------------------------------------------------------------------------- /modules/mssql/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.mssql.SqlServerContainer 2 | .. title:: testcontainers.mssql.SqlServerContainer 3 | -------------------------------------------------------------------------------- /modules/mssql/testcontainers/mssql/__init__.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from typing import Optional 3 | 4 | from testcontainers.core.generic import DbContainer 5 | from testcontainers.core.utils import raise_for_deprecated_parameter 6 | from testcontainers.core.waiting_utils import wait_container_is_ready 7 | 8 | 9 | class SqlServerContainer(DbContainer): 10 | """ 11 | Microsoft SQL Server database container. 12 | 13 | Example: 14 | 15 | .. doctest:: 16 | 17 | >>> import sqlalchemy 18 | >>> from testcontainers.mssql import SqlServerContainer 19 | 20 | >>> with SqlServerContainer("mcr.microsoft.com/mssql/server:2022-CU12-ubuntu-22.04") as mssql: 21 | ... engine = sqlalchemy.create_engine(mssql.get_connection_url()) 22 | ... with engine.begin() as connection: 23 | ... 
result = connection.execute(sqlalchemy.text("select @@VERSION")) 24 | """ 25 | 26 | def __init__( 27 | self, 28 | image: str = "mcr.microsoft.com/mssql/server:2019-latest", 29 | username: str = "SA", 30 | password: Optional[str] = None, 31 | port: int = 1433, 32 | dbname: str = "tempdb", 33 | dialect: str = "mssql+pymssql", 34 | **kwargs, 35 | ) -> None: 36 | raise_for_deprecated_parameter(kwargs, "user", "username") 37 | super().__init__(image, **kwargs) 38 | 39 | self.port = port 40 | self.with_exposed_ports(self.port) 41 | 42 | self.password = password or environ.get("SQLSERVER_PASSWORD", "1Secure*Password1") 43 | self.username = username 44 | self.dbname = dbname 45 | self.dialect = dialect 46 | 47 | def _configure(self) -> None: 48 | self.with_env("SA_PASSWORD", self.password) 49 | self.with_env("SQLSERVER_USER", self.username) 50 | self.with_env("SQLSERVER_DBNAME", self.dbname) 51 | self.with_env("ACCEPT_EULA", "Y") 52 | 53 | @wait_container_is_ready(AssertionError) 54 | def _connect(self) -> None: 55 | status, _ = self.exec( 56 | ["bash", "-c", '/opt/mssql-tools*/bin/sqlcmd -U "$SQLSERVER_USER" -P "$SA_PASSWORD" -Q \'SELECT 1\' -C'] 57 | ) 58 | assert status == 0, "Cannot run 'SELECT 1': container is not ready" 59 | 60 | def get_connection_url(self) -> str: 61 | return super()._create_connection_url( 62 | dialect=self.dialect, username=self.username, password=self.password, dbname=self.dbname, port=self.port 63 | ) 64 | -------------------------------------------------------------------------------- /modules/mssql/tests/test_mssql.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sqlalchemy 3 | 4 | from testcontainers.core.utils import is_arm 5 | from testcontainers.mssql import SqlServerContainer 6 | 7 | 8 | @pytest.mark.skipif(is_arm(), reason="mssql container not available for ARM") 9 | @pytest.mark.parametrize("version", ["2022-CU12-ubuntu-22.04", "2019-CU25-ubuntu-20.04"]) 10 | def test_docker_run_mssql(version: str): 11 | with SqlServerContainer(f"mcr.microsoft.com/mssql/server:{version}", password="1Secure*Password2") as mssql: 12 | engine = sqlalchemy.create_engine(mssql.get_connection_url()) 13 | with engine.begin() as connection: 14 | result = connection.execute(sqlalchemy.text("select @@servicename")) 15 | for row in result: 16 | assert row[0] == "MSSQLSERVER" 17 | 18 | 19 | def test_docker_run_azure_sql_edge(): 20 | with SqlServerContainer("mcr.microsoft.com/azure-sql-edge:1.0.7") as mssql: 21 | engine = sqlalchemy.create_engine(mssql.get_connection_url()) 22 | with engine.begin() as connection: 23 | result = connection.execute(sqlalchemy.text("select @@servicename")) 24 | for row in result: 25 | assert row[0] == "MSSQLSERVER" 26 | 27 | 28 | def test_microsoft_changes_the_mssql_tools_folder_name(): 29 | with SqlServerContainer("mcr.microsoft.com/mssql/server:2019-latest") as mssql: 30 | engine = sqlalchemy.create_engine(mssql.get_connection_url()) 31 | with engine.begin() as connection: 32 | result = connection.execute(sqlalchemy.text("select @@servicename")) 33 | for row in result: 34 | assert row[0] == "MSSQLSERVER" 35 | 36 | 37 | # This is a feature in the generic DbContainer class, 38 | # but it can't be tested on its own 39 | def test_quoted_password(): 40 | user = "SA" 41 | # spaces seem to cause issues? 42 | password = "p@$%25+0&%rd:/!=?" 
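    # quoted_password below is the expected percent-encoded form of password as it appears in
    # the connection URL (e.g. '@' -> '%40', ':' -> '%3A', and the literal '%25' -> '%2525').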
43 | quoted_password = "p%40%24%2525+0%26%25rd%3A%2F%21%3D%3F" 44 | driver = "pymssql" 45 | port = 1433 46 | expected_url = f"mssql+{driver}://{user}:{quoted_password}@localhost:{port}/tempdb" 47 | kwargs = { 48 | "username": user, 49 | "password": password, 50 | } 51 | with ( 52 | SqlServerContainer("mcr.microsoft.com/azure-sql-edge:1.0.7", **kwargs) 53 | .with_env("ACCEPT_EULA", "Y") 54 | .with_env( 55 | "MSSQL_SA_PASSWORD", "{" + password + "}" 56 | ) # special characters have to be quoted in braces in env vars 57 | ) as container: 58 | exposed_port = container.get_exposed_port(container.port) 59 | expected_url = expected_url.replace(f":{port}", f":{exposed_port}") 60 | url = container.get_connection_url() 61 | assert url == expected_url 62 | -------------------------------------------------------------------------------- /modules/mysql/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.mysql.MySqlContainer 2 | .. title:: testcontainers.mysql.MySqlContainer 3 | -------------------------------------------------------------------------------- /modules/mysql/tests/seeds/01-schema.sql: -------------------------------------------------------------------------------- 1 | -- Sample SQL schema, no data 2 | CREATE TABLE `stuff` ( 3 | `id` mediumint NOT NULL AUTO_INCREMENT, 4 | `name` VARCHAR(63) NOT NULL, 5 | PRIMARY KEY (`id`) 6 | ); 7 | -------------------------------------------------------------------------------- /modules/mysql/tests/seeds/02-seeds.sql: -------------------------------------------------------------------------------- 1 | -- Sample data, to be loaded after the schema 2 | INSERT INTO stuff (name) 3 | VALUES ("foo"), ("bar"), ("qux"), ("frob"); 4 | -------------------------------------------------------------------------------- /modules/nats/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.nats.NatsContainer 2 | .. title:: testcontainers.nats.NatsContainer 3 | -------------------------------------------------------------------------------- /modules/nats/testcontainers/nats/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | 14 | 15 | from testcontainers.core.container import DockerContainer 16 | from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs 17 | 18 | 19 | class NatsContainer(DockerContainer): 20 | """ 21 | Nats container. 22 | 23 | Example: 24 | 25 | .. doctest:: 26 | 27 | >>> import asyncio 28 | >>> from nats import connect as nats_connect 29 | >>> from testcontainers.nats import NatsContainer 30 | 31 | >>> async def test_doctest_usage(): 32 | ... with NatsContainer() as nats_container: 33 | ... client = await nats_connect(nats_container.nats_uri()) 34 | ... sub_tc = await client.subscribe("tc") 35 | ... await client.publish("tc", b"Test-Containers") 36 | ... 
next_message = await sub_tc.next_msg(timeout=5.0) 37 | ... await client.close() 38 | ... return next_message.data 39 | >>> asyncio.run(test_doctest_usage()) 40 | b'Test-Containers' 41 | """ 42 | 43 | def __init__( 44 | self, 45 | image: str = "nats:latest", 46 | client_port: int = 4222, 47 | management_port: int = 8222, 48 | expected_ready_log: str = "Server is ready", 49 | ready_timeout_secs: int = 120, 50 | **kwargs, 51 | ) -> None: 52 | super().__init__(image, **kwargs) 53 | self.client_port = client_port 54 | self.management_port = management_port 55 | self._expected_ready_log = expected_ready_log 56 | self._ready_timeout_secs = max(ready_timeout_secs, 0) 57 | self.with_exposed_ports(self.client_port, self.management_port) 58 | 59 | @wait_container_is_ready() 60 | def _healthcheck(self) -> None: 61 | wait_for_logs(self, self._expected_ready_log, timeout=self._ready_timeout_secs) 62 | 63 | def nats_uri(self) -> str: 64 | return f"nats://{self.get_container_host_ip()}:{self.get_exposed_port(self.client_port)}" 65 | 66 | def nats_host_and_port(self) -> tuple[str, int]: 67 | return self.get_container_host_ip(), self.get_exposed_port(self.client_port) 68 | 69 | def nats_management_uri(self) -> str: 70 | return f"nats://{self.get_container_host_ip()}:{self.get_exposed_port(self.management_port)}" 71 | 72 | def start(self) -> "NatsContainer": 73 | super().start() 74 | self._healthcheck() 75 | return self 76 | -------------------------------------------------------------------------------- /modules/nats/tests/test_nats.py: -------------------------------------------------------------------------------- 1 | from testcontainers.nats import NatsContainer 2 | from uuid import uuid4 3 | import pytest 4 | 5 | from nats import connect as nats_connect 6 | from nats.aio.client import Client as NATSClient 7 | 8 | 9 | async def get_client(container: NatsContainer) -> "NATSClient": 10 | """ 11 | Get a nats client. 12 | 13 | Returns: 14 | client: Nats client to connect to the container. 
15 | """ 16 | conn_string = container.nats_uri() 17 | client = await nats_connect(conn_string) 18 | return client 19 | 20 | 21 | def test_basic_container_ops(): 22 | with NatsContainer() as container: 23 | # Not sure how to get type information without doing this 24 | container: NatsContainer = container 25 | h, p = container.nats_host_and_port() 26 | assert h == "localhost" 27 | uri = container.nats_uri() 28 | management_uri = container.nats_management_uri() 29 | 30 | assert uri != management_uri 31 | 32 | 33 | @pytest.mark.asyncio 34 | async def test_pubsub(anyio_backend): 35 | with NatsContainer() as container: 36 | nc: NATSClient = await get_client(container) 37 | 38 | topic = str(uuid4()) 39 | 40 | sub = await nc.subscribe(topic) 41 | sent_message = b"Test-Containers" 42 | await nc.publish(topic, b"Test-Containers") 43 | received_msg = await sub.next_msg() 44 | print("Received:", received_msg) 45 | assert sent_message == received_msg.data 46 | await nc.flush() 47 | await nc.close() 48 | 49 | 50 | @pytest.mark.asyncio 51 | async def test_more_complex_example(anyio_backend): 52 | with NatsContainer() as container: 53 | nc: NATSClient = await get_client(container) 54 | 55 | sub = await nc.subscribe("greet.*") 56 | await nc.publish("greet.joe", b"hello") 57 | 58 | try: 59 | await sub.next_msg(timeout=0.1) 60 | except TimeoutError: 61 | pass 62 | 63 | await nc.publish("greet.joe", b"hello.joe") 64 | await nc.publish("greet.pam", b"hello.pam") 65 | 66 | first = await sub.next_msg(timeout=0.1) 67 | assert b"hello.joe" == first.data 68 | 69 | second = await sub.next_msg(timeout=0.1) 70 | assert b"hello.pam" == second.data 71 | 72 | await nc.publish("greet.bob", b"hello") 73 | 74 | await sub.unsubscribe() 75 | await nc.drain() 76 | 77 | 78 | @pytest.mark.asyncio 79 | async def test_doctest_usage(): 80 | """simpler to run test to mirror what is in the doctest""" 81 | with NatsContainer() as nats_container: 82 | client = await nats_connect(nats_container.nats_uri()) 83 | sub_tc = await client.subscribe("tc") 84 | await client.publish("tc", b"Test-Containers") 85 | next_message = await sub_tc.next_msg(timeout=5.0) 86 | await client.close() 87 | assert next_message.data == b"Test-Containers" 88 | -------------------------------------------------------------------------------- /modules/neo4j/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.neo4j.Neo4jContainer 2 | .. title:: testcontainers.neo4j.Neo4jContainer 3 | -------------------------------------------------------------------------------- /modules/neo4j/testcontainers/neo4j/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 
13 | 14 | import os 15 | from typing import Optional 16 | 17 | from neo4j import Driver, GraphDatabase 18 | from testcontainers.core.config import testcontainers_config as c 19 | from testcontainers.core.generic import DbContainer 20 | from testcontainers.core.utils import raise_for_deprecated_parameter 21 | from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs 22 | 23 | 24 | class Neo4jContainer(DbContainer): 25 | """ 26 | Neo4j Graph Database (Standalone) database container. 27 | 28 | Example: 29 | 30 | .. doctest:: 31 | 32 | >>> from testcontainers.neo4j import Neo4jContainer 33 | 34 | >>> with Neo4jContainer() as neo4j, \\ 35 | ... neo4j.get_driver() as driver, \\ 36 | ... driver.session() as session: 37 | ... result = session.run("MATCH (n) RETURN n LIMIT 1") 38 | ... record = result.single() 39 | """ 40 | 41 | def __init__( 42 | self, 43 | image: str = "neo4j:latest", 44 | port: int = 7687, 45 | password: Optional[str] = None, 46 | username: Optional[str] = None, 47 | **kwargs, 48 | ) -> None: 49 | raise_for_deprecated_parameter(kwargs, "bolt_port", "port") 50 | super().__init__(image, **kwargs) 51 | self.username = username or os.environ.get("NEO4J_USER", "neo4j") 52 | self.password = password or os.environ.get("NEO4J_PASSWORD", "password") 53 | self.port = port 54 | self.with_exposed_ports(self.port) 55 | self._driver = None 56 | 57 | def _configure(self) -> None: 58 | self.with_env("NEO4J_AUTH", f"neo4j/{self.password}") 59 | 60 | def get_connection_url(self) -> str: 61 | return f"bolt://{self.get_container_host_ip()}:{self.get_exposed_port(self.port)}" 62 | 63 | @wait_container_is_ready() 64 | def _connect(self) -> None: 65 | wait_for_logs(self, "Remote interface available at", c.timeout) 66 | 67 | # Then we actually check that the container really is listening 68 | with self.get_driver() as driver: 69 | # Drivers may or may not be lazy 70 | # force them to do a round trip to confirm neo4j is working 71 | driver.verify_connectivity() 72 | 73 | def get_driver(self, **kwargs) -> Driver: 74 | return GraphDatabase.driver(self.get_connection_url(), auth=(self.username, self.password), **kwargs) 75 | -------------------------------------------------------------------------------- /modules/neo4j/tests/test_neo4j.py: -------------------------------------------------------------------------------- 1 | from testcontainers.neo4j import Neo4jContainer 2 | 3 | 4 | def test_docker_run_neo4j_latest(): 5 | with Neo4jContainer() as neo4j, neo4j.get_driver() as driver, driver.session() as session: 6 | result = session.run( 7 | """ 8 | CALL dbms.components() 9 | YIELD name, versions, edition 10 | UNWIND versions as version 11 | RETURN name, version, edition 12 | """ 13 | ) 14 | record = result.single() 15 | assert record["name"].startswith("Neo4j") 16 | -------------------------------------------------------------------------------- /modules/nginx/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.nginx.NginxContainer 2 | .. title:: testcontainers.nginx.NginxContainer 3 | -------------------------------------------------------------------------------- /modules/nginx/testcontainers/nginx/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. 
You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | import urllib.error 14 | import urllib.parse 15 | import urllib.request 16 | 17 | from testcontainers.core.container import DockerContainer 18 | from testcontainers.core.utils import raise_for_deprecated_parameter 19 | from testcontainers.core.waiting_utils import wait_container_is_ready 20 | 21 | 22 | class NginxContainer(DockerContainer): 23 | def __init__(self, image: str = "nginx:latest", port: int = 80, **kwargs) -> None: 24 | raise_for_deprecated_parameter(kwargs, "port_to_expose", "port") 25 | super().__init__(image, **kwargs) 26 | self.port = port 27 | self.with_exposed_ports(self.port) 28 | 29 | def start(self) -> "NginxContainer": 30 | super().start() 31 | 32 | host = self.get_container_host_ip() 33 | port = str(self.get_exposed_port(self.port)) 34 | self._connect(host, port) 35 | 36 | return self 37 | 38 | @wait_container_is_ready(urllib.error.URLError) 39 | def _connect(self, host: str, port: str) -> None: 40 | url = urllib.parse.urlunsplit(("http", f"{host}:{port}", "", "", "")) 41 | urllib.request.urlopen(url, timeout=1) 42 | -------------------------------------------------------------------------------- /modules/nginx/tests/test_nginx.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | from testcontainers.nginx import NginxContainer 4 | 5 | 6 | def test_docker_run_nginx(): 7 | nginx_container = NginxContainer("nginx:1.13.8") 8 | with nginx_container as nginx: 9 | url = f"http://{nginx.get_container_host_ip()}:{nginx.get_exposed_port(nginx.port)}/" 10 | r = requests.get(url) 11 | assert r.status_code == 200 12 | assert "Welcome to nginx!" in r.text 13 | -------------------------------------------------------------------------------- /modules/ollama/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.ollama.OllamaContainer 2 | .. 
title:: testcontainers.ollama.OllamaContainer 3 | -------------------------------------------------------------------------------- /modules/ollama/tests/test_ollama.py: -------------------------------------------------------------------------------- 1 | import random 2 | import string 3 | from pathlib import Path 4 | 5 | import requests 6 | from testcontainers.ollama import OllamaContainer 7 | 8 | 9 | def random_string(length=6): 10 | return "".join(random.choices(string.ascii_lowercase, k=length)) 11 | 12 | 13 | def test_ollama_container(): 14 | with OllamaContainer() as ollama: 15 | url = ollama.get_endpoint() 16 | response = requests.get(url) 17 | assert response.status_code == 200 18 | assert response.text == "Ollama is running" 19 | 20 | 21 | def test_with_default_config(): 22 | with OllamaContainer("ollama/ollama:0.1.26") as ollama: 23 | ollama.start() 24 | response = requests.get(f"{ollama.get_endpoint()}/api/version") 25 | version = response.json().get("version") 26 | assert version == "0.1.26" 27 | 28 | 29 | def test_download_model_and_commit_to_image(): 30 | new_image_name = f"tc-ollama-allminilm-{random_string(length=4).lower()}" 31 | with OllamaContainer("ollama/ollama:0.1.26") as ollama: 32 | ollama.start() 33 | # Pull the model 34 | ollama.pull_model("all-minilm") 35 | 36 | response = requests.get(f"{ollama.get_endpoint()}/api/tags") 37 | model_name = ollama.list_models()[0].get("name") 38 | assert "all-minilm" in model_name 39 | 40 | # Commit the container state to a new image 41 | ollama.commit_to_image(new_image_name) 42 | 43 | # Verify the new image 44 | with OllamaContainer(new_image_name) as ollama: 45 | ollama.start() 46 | response = requests.get(f"{ollama.get_endpoint()}/api/tags") 47 | model_name = response.json().get("models", [])[0].get("name") 48 | assert "all-minilm" in model_name 49 | 50 | 51 | def test_models_saved_in_folder(tmp_path: Path): 52 | with OllamaContainer("ollama/ollama:0.1.26", ollama_home=tmp_path) as ollama: 53 | assert len(ollama.list_models()) == 0 54 | ollama.pull_model("all-minilm") 55 | assert len(ollama.list_models()) == 1 56 | assert "all-minilm" in ollama.list_models()[0].get("name") 57 | 58 | with OllamaContainer("ollama/ollama:0.1.26", ollama_home=tmp_path) as ollama: 59 | assert len(ollama.list_models()) == 1 60 | assert "all-minilm" in ollama.list_models()[0].get("name") 61 | -------------------------------------------------------------------------------- /modules/opensearch/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.opensearch.OpenSearchContainer 2 | .. 
title:: testcontainers.opensearch.OpenSearchContainer 3 | -------------------------------------------------------------------------------- /modules/opensearch/tests/test_opensearch.py: -------------------------------------------------------------------------------- 1 | from testcontainers.opensearch import OpenSearchContainer 2 | 3 | import pytest 4 | 5 | 6 | @pytest.fixture(autouse=True) 7 | def disable_logging(): 8 | import logging 9 | import warnings 10 | 11 | warnings.filterwarnings("ignore") 12 | logging.getLogger("opensearch").setLevel(logging.CRITICAL) 13 | 14 | yield 15 | warnings.resetwarnings() 16 | logging.getLogger("opensearch").setLevel(logging.NOTSET) 17 | 18 | 19 | def test_docker_run_opensearch(): 20 | with OpenSearchContainer() as opensearch: 21 | client = opensearch.get_client() 22 | assert client.cluster.health()["status"] == "green" 23 | 24 | 25 | def test_docker_run_opensearch_with_security(): 26 | with OpenSearchContainer(security_enabled=True) as opensearch: 27 | client = opensearch.get_client() 28 | assert client.cluster.health()["status"] == "green" 29 | 30 | 31 | def test_docker_run_opensearch_v1(): 32 | with OpenSearchContainer(image="opensearchproject/opensearch:1.3.6") as opensearch: 33 | client = opensearch.get_client() 34 | assert client.cluster.health()["status"] == "green" 35 | 36 | 37 | def test_docker_run_opensearch_v1_with_security(): 38 | with OpenSearchContainer(image="opensearchproject/opensearch:1.3.6", security_enabled=True) as opensearch: 39 | client = opensearch.get_client() 40 | assert client.cluster.health()["status"] == "green" 41 | 42 | 43 | def test_docker_run_opensearch_v2_12(): 44 | with OpenSearchContainer( 45 | image="opensearchproject/opensearch:2.12.0", initial_admin_password="Testing!#345" 46 | ) as opensearch: 47 | client = opensearch.get_client() 48 | assert client.cluster.health()["status"] == "green" 49 | 50 | 51 | def test_search(): 52 | with OpenSearchContainer() as opensearch: 53 | client = opensearch.get_client() 54 | client.index(index="test", body={"test": "test"}) 55 | client.indices.refresh(index="test") 56 | result = client.search(index="test", body={"query": {"match_all": {}}}) 57 | assert result["hits"]["total"]["value"] == 1 58 | -------------------------------------------------------------------------------- /modules/oracle-free/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.oracle.OracleDbContainer 2 | .. title:: testcontainers.oracle.OracleDbContainer 3 | -------------------------------------------------------------------------------- /modules/oracle-free/testcontainers/oracle/__init__.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from secrets import randbits 3 | from typing import Optional 4 | 5 | from testcontainers.core.generic import DbContainer 6 | from testcontainers.core.waiting_utils import wait_for_logs 7 | 8 | 9 | class OracleDbContainer(DbContainer): 10 | """ 11 | Oracle database container. 12 | 13 | Example: 14 | 15 | .. doctest:: 16 | 17 | >>> import sys, pytest 18 | >>> if sys.platform.startswith('win') or sys.platform == 'darwin': 19 | ... pytest.skip("linux only test") 20 | 21 | >>> import sqlalchemy 22 | >>> from testcontainers.oracle import OracleDbContainer 23 | 24 | >>> with OracleDbContainer() as oracle: 25 | ... engine = sqlalchemy.create_engine(oracle.get_connection_url()) 26 | ... with engine.begin() as connection: 27 | ... 
result = connection.execute(sqlalchemy.text("SELECT 1 FROM dual")) 28 | ... result.fetchall() 29 | [(1,)] 30 | """ 31 | 32 | def __init__( 33 | self, 34 | image: str = "gvenzl/oracle-free:slim", 35 | oracle_password: Optional[str] = None, 36 | username: Optional[str] = None, 37 | password: Optional[str] = None, 38 | port: int = 1521, 39 | dbname: Optional[str] = None, 40 | **kwargs, 41 | ) -> None: 42 | super().__init__(image=image, **kwargs) 43 | 44 | self.port = port 45 | self.with_exposed_ports(self.port) 46 | 47 | self.oracle_password = oracle_password or environ.get("ORACLE_PASSWORD") or hex(randbits(24)) 48 | self.username = username or environ.get("APP_USER") 49 | self.password = password or environ.get("APP_USER_PASSWORD") 50 | self.dbname = dbname or environ.get("ORACLE_DATABASE") 51 | 52 | def get_connection_url(self) -> str: 53 | return super()._create_connection_url( 54 | dialect="oracle+oracledb", 55 | username=self.username or "system", 56 | password=self.password or self.oracle_password, 57 | port=self.port, 58 | ) + "/?service_name={}".format(self.dbname or "FREEPDB1") 59 | # Default DB is "FREEPDB1" 60 | 61 | def _connect(self) -> None: 62 | wait_for_logs(self, "DATABASE IS READY TO USE!") 63 | 64 | def _configure(self) -> None: 65 | # if self.oracle_password is not None: 66 | # self.with_env("ORACLE_PASSWORD", self.oracle_password) 67 | # # Either ORACLE_PASSWORD or ORACLE_RANDOM_PASSWORD need to be passed on 68 | # else: 69 | # self.with_env("ORACLE_RANDOM_PASSWORD", "y") 70 | # this module is unusable with a random password 71 | self.with_env("ORACLE_PASSWORD", self.oracle_password) 72 | 73 | if self.username is not None: 74 | self.with_env("APP_USER", self.username) 75 | if self.password is not None: 76 | self.with_env("APP_USER_PASSWORD", self.password) 77 | 78 | # FREE and FREEPDB1 are predefined databases, do not pass them on as ORACLE_DATABASE 79 | if self.dbname is not None and self.dbname.upper() not in ("FREE", "FREEPDB1"): 80 | self.with_env("ORACLE_DATABASE", self.dbname) 81 | -------------------------------------------------------------------------------- /modules/postgres/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.postgres.PostgresContainer 2 | .. title:: testcontainers.postgres.PostgresContainer 3 | -------------------------------------------------------------------------------- /modules/postgres/tests/fixtures/postgres_create_example_table.sql: -------------------------------------------------------------------------------- 1 | create table example 2 | ( 3 | id serial not null primary key, 4 | name varchar(255) not null unique, 5 | description text null 6 | ); 7 | -------------------------------------------------------------------------------- /modules/qdrant/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.qdrant.QdrantContainer 2 | .. 
title:: testcontainers.qdrant.QdrantContainer 3 | -------------------------------------------------------------------------------- /modules/qdrant/tests/test_config.yaml: -------------------------------------------------------------------------------- 1 | # Qdrant image configuration file for testing 2 | # Reference: https://qdrant.tech/documentation/guides/configuration/#configuration-file-example 3 | log_level: INFO 4 | 5 | service: 6 | api_key: "SOME_TEST_KEY" 7 | -------------------------------------------------------------------------------- /modules/qdrant/tests/test_qdrant.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from testcontainers.qdrant import QdrantContainer 3 | import uuid 4 | from grpc import RpcError 5 | from pathlib import Path 6 | 7 | import qdrant_client 8 | 9 | 10 | def test_docker_run_qdrant(): 11 | with QdrantContainer() as qdrant: 12 | client = qdrant.get_client() 13 | collections = client.get_collections().collections 14 | assert len(collections) == 0 15 | 16 | client = qdrant.get_client(prefer_grpc=True) 17 | collections = client.get_collections().collections 18 | assert len(collections) == 0 19 | 20 | 21 | def test_qdrant_with_api_key_http(): 22 | api_key = uuid.uuid4().hex 23 | 24 | with QdrantContainer(api_key=api_key) as qdrant: 25 | with pytest.raises(qdrant_client.http.exceptions.UnexpectedResponse) as e: 26 | # Construct a client without an API key 27 | qdrant_client.QdrantClient(location=f"http://{qdrant.rest_host_address}").get_collections() 28 | 29 | assert "Must provide an API key" in str(e.value) 30 | 31 | # Construct a client with an API key 32 | collections = ( 33 | qdrant_client.QdrantClient(location=f"http://{qdrant.rest_host_address}", api_key=api_key) 34 | .get_collections() 35 | .collections 36 | ) 37 | 38 | assert len(collections) == 0 39 | 40 | # Get an automatically configured client instance 41 | collections = qdrant.get_client().get_collections().collections 42 | 43 | assert len(collections) == 0 44 | 45 | 46 | def test_qdrant_with_api_key_grpc(): 47 | api_key = uuid.uuid4().hex 48 | 49 | with QdrantContainer(api_key=api_key) as qdrant: 50 | with pytest.raises(RpcError) as e: 51 | qdrant_client.QdrantClient( 52 | url=f"http://{qdrant.grpc_host_address}", 53 | grpc_port=qdrant.exposed_grpc_port, 54 | prefer_grpc=True, 55 | ).get_collections() 56 | 57 | assert "Must provide an API key" in str(e.value) 58 | 59 | collections = ( 60 | qdrant_client.QdrantClient( 61 | url=f"http://{qdrant.grpc_host_address}", 62 | grpc_port=qdrant.exposed_grpc_port, 63 | prefer_grpc=True, 64 | api_key=api_key, 65 | ) 66 | .get_collections() 67 | .collections 68 | ) 69 | 70 | assert len(collections) == 0 71 | 72 | 73 | def test_qdrant_with_config_file(): 74 | config_file_path = Path(__file__).with_name("test_config.yaml") 75 | 76 | with QdrantContainer(config_file_path=config_file_path) as qdrant: 77 | with pytest.raises(qdrant_client.http.exceptions.UnexpectedResponse) as e: 78 | qdrant_client.QdrantClient(location=f"http://{qdrant.rest_host_address}").get_collections() 79 | 80 | assert "Must provide an API key" in str(e.value) 81 | -------------------------------------------------------------------------------- /modules/rabbitmq/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.rabbitmq.RabbitMqContainer 2 | .. 
title:: testcontainers.rabbitmq.RabbitMqContainer 3 | -------------------------------------------------------------------------------- /modules/rabbitmq/tests/test_rabbitmq.py: -------------------------------------------------------------------------------- 1 | import json 2 | from typing import Optional 3 | 4 | import pika 5 | import pytest 6 | 7 | from testcontainers.rabbitmq import RabbitMqContainer 8 | 9 | QUEUE = "test-q" 10 | EXCHANGE = "test-exchange" 11 | ROUTING_KEY = "test-route-key" 12 | MESSAGE = {"hello": "world"} 13 | 14 | 15 | @pytest.mark.parametrize( 16 | argnames=["port", "username", "password", "vhost"], 17 | argvalues=[ 18 | [None, None, None, None], # use the defaults 19 | [5673, None, None, None], # test with custom port 20 | [None, "my_test_user", "my_secret_password", None], # test with custom credentials 21 | [None, None, None, "vhost"], # test with custom vhost 22 | ], 23 | ) 24 | def test_docker_run_rabbitmq( 25 | port: Optional[int], username: Optional[str], password: Optional[str], vhost: Optional[str] 26 | ): 27 | """Run rabbitmq test container and use it to deliver a simple message.""" 28 | kwargs = {} 29 | if port is not None: 30 | kwargs["port"] = port 31 | if username is not None: 32 | kwargs["username"] = username 33 | if password is not None: 34 | kwargs["password"] = password 35 | if vhost is not None: 36 | kwargs["vhost"] = vhost 37 | 38 | rabbitmq_container = RabbitMqContainer("rabbitmq:latest", **kwargs) 39 | with rabbitmq_container as rabbitmq: 40 | # connect to rabbitmq: 41 | connection_params = rabbitmq.get_connection_params() 42 | connection = pika.BlockingConnection(connection_params) 43 | 44 | # create exchange and queue: 45 | channel = connection.channel() 46 | channel.exchange_declare(exchange=EXCHANGE, exchange_type="topic") 47 | channel.queue_declare(QUEUE, arguments={}) 48 | channel.queue_bind(QUEUE, EXCHANGE, ROUTING_KEY) 49 | 50 | # publish message: 51 | encoded_message = json.dumps(MESSAGE) 52 | channel.basic_publish(EXCHANGE, ROUTING_KEY, body=encoded_message) 53 | 54 | _, _, body = channel.basic_get(queue=QUEUE) 55 | received_message = json.loads(body.decode()) 56 | assert received_message == MESSAGE 57 | -------------------------------------------------------------------------------- /modules/redis/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.redis.RedisContainer 2 | .. 
title:: testcontainers.redis.RedisContainer 3 | -------------------------------------------------------------------------------- /modules/redis/tests/test_redis.py: -------------------------------------------------------------------------------- 1 | import time 2 | import redis 3 | from testcontainers.redis import RedisContainer, AsyncRedisContainer 4 | import pytest 5 | 6 | 7 | def test_docker_run_redis(): 8 | config = RedisContainer() 9 | with config as redis_container: 10 | client = redis_container.get_client() 11 | p = client.pubsub() 12 | p.subscribe("test") 13 | client.publish("test", "new_msg") 14 | msg = wait_for_message(p) 15 | assert "data" in msg 16 | assert msg["data"] == b"new_msg" 17 | 18 | 19 | def test_docker_run_redis_with_password(): 20 | config = RedisContainer(password="mypass") 21 | with config as redis_container: 22 | client = redis_container.get_client(decode_responses=True) 23 | client.set("hello", "world") 24 | assert client.get("hello") == "world" 25 | 26 | 27 | 28 | 29 | @pytest.mark.usefixtures("anyio_backend") 30 | @pytest.mark.parametrize("anyio_backend", ["asyncio"]) 31 | async def test_key_set_in_async_redis(anyio_backend): 32 | with AsyncRedisContainer() as container: 33 | async_redis_client: redis.Redis = await container.get_async_client(decode_responses=True) 34 | key = "key" 35 | expected_value = 1 36 | await async_redis_client.set(key, expected_value) 37 | actual_value = await async_redis_client.get(key) 38 | assert int(actual_value) == expected_value 39 | 40 | 41 | 42 | 43 | @pytest.mark.usefixtures("anyio_backend") 44 | @pytest.mark.parametrize("anyio_backend", ["asyncio"]) 45 | @pytest.mark.skip(reason="Need to sort out async pub/sub") 46 | async def test_docker_run_async_redis(anyio_backend): 47 | config = AsyncRedisContainer() 48 | with config as container: 49 | client: redis.Redis = await container.get_async_client(decode_responses=True) 50 | p = await client.pubsub() 51 | await p.subscribe("test") 52 | await client.publish("test", "new_msg") 53 | msg = wait_for_message(p) 54 | assert "data" in msg 55 | assert msg["data"] == b"new_msg" 56 | 57 | 58 | 59 | 60 | @pytest.mark.usefixtures("anyio_backend") 61 | @pytest.mark.parametrize("anyio_backend", ["asyncio"]) 62 | async def test_docker_run_async_redis_with_password(anyio_backend): 63 | config = AsyncRedisContainer(password="mypass") 64 | with config as container: 65 | client: redis.Redis = await container.get_async_client(decode_responses=True) 66 | await client.set("hello", "world") 67 | assert await client.get("hello") == "world" 68 | 69 | 70 | def wait_for_message(pubsub, timeout=1, ignore_subscribe_messages=True): 71 | now = time.time() 72 | timeout = now + timeout 73 | while now < timeout: 74 | message = pubsub.get_message(ignore_subscribe_messages=ignore_subscribe_messages) 75 | if message is not None: 76 | return message 77 | time.sleep(0.01) 78 | now = time.time() 79 | return None 80 |
-------------------------------------------------------------------------------- /modules/registry/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.registry.DockerRegistryContainer 2 | 3 | When building Docker containers with Docker Buildx, there is currently no way to test your images locally without 4 | a local registry: otherwise Buildx pushes the image to Docker Hub, which is not what you want in a test case. At the 5 | same time, Buildx is increasingly necessary for building images efficiently, especially multi-arch images.
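A minimal sketch of that workflow, assuming the Docker SDK for Python (docker-py) is installed; the alpine image and tag names below are only placeholders, and the registry is bound to host port 5000 so the local daemon can push to it over plain HTTP without extra insecure-registry configuration::

    import docker
    from testcontainers.registry import DockerRegistryContainer

    with DockerRegistryContainer().with_bind_ports(5000, 5000) as registry_container:
        registry = registry_container.get_registry()  # typically "localhost:5000"
        client = docker.from_env()

        # Tag any local image into the throwaway registry and push it there.
        image = client.images.pull("alpine", tag="3.19")
        image.tag(f"{registry}/alpine", tag="test")
        client.images.push(f"{registry}/alpine", tag="test")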
6 | 7 | When you use Docker Python libraries like docker-py or python-on-whales to build and test Docker images, what a lot of 8 | persons and DevOps engineers like me nowadays do, a test container comes in very handy. 9 | -------------------------------------------------------------------------------- /modules/registry/testcontainers/registry/__init__.py: -------------------------------------------------------------------------------- 1 | import time 2 | from io import BytesIO 3 | from tarfile import TarFile, TarInfo 4 | from typing import TYPE_CHECKING, Optional 5 | 6 | import bcrypt 7 | from requests import get 8 | from requests.auth import HTTPBasicAuth 9 | from requests.exceptions import ConnectionError, ReadTimeout 10 | 11 | from testcontainers.core.container import DockerContainer 12 | from testcontainers.core.waiting_utils import wait_container_is_ready 13 | 14 | if TYPE_CHECKING: 15 | from requests import Response 16 | 17 | 18 | class DockerRegistryContainer(DockerContainer): 19 | # https://docs.docker.com/registry/ 20 | credentials_path: str = "/htpasswd/credentials.txt" 21 | 22 | def __init__( 23 | self, 24 | image: str = "registry:2", 25 | port: int = 5000, 26 | username: Optional[str] = None, 27 | password: Optional[str] = None, 28 | **kwargs, 29 | ) -> None: 30 | super().__init__(image=image, **kwargs) 31 | self.port: int = port 32 | self.username: Optional[str] = username 33 | self.password: Optional[str] = password 34 | self.with_exposed_ports(self.port) 35 | 36 | def _copy_credentials(self) -> None: 37 | # Create credentials and write them to the container 38 | hashed_password: str = bcrypt.hashpw( 39 | self.password.encode("utf-8"), 40 | bcrypt.gensalt(rounds=12, prefix=b"2a"), 41 | ).decode("utf-8") 42 | content: bytes = f"{self.username}:{hashed_password}".encode("utf-8") # noqa: UP012 43 | 44 | with BytesIO() as tar_archive_object, TarFile(fileobj=tar_archive_object, mode="w") as tmp_tarfile: 45 | tarinfo: TarInfo = TarInfo(name=self.credentials_path) 46 | tarinfo.size = len(content) 47 | tarinfo.mtime = time.time() 48 | 49 | tmp_tarfile.addfile(tarinfo, BytesIO(content)) 50 | tar_archive_object.seek(0) 51 | self.get_wrapped_container().put_archive("/", tar_archive_object) 52 | 53 | @wait_container_is_ready(ConnectionError, ReadTimeout) 54 | def _readiness_probe(self) -> None: 55 | url: str = f"http://{self.get_registry()}/v2" 56 | if self.username and self.password: 57 | response: Response = get(url, auth=HTTPBasicAuth(self.username, self.password), timeout=1) 58 | else: 59 | response: Response = get(url, timeout=1) 60 | response.raise_for_status() 61 | 62 | def start(self): 63 | if self.username and self.password: 64 | self.with_env("REGISTRY_AUTH_HTPASSWD_REALM", "local-registry") 65 | self.with_env("REGISTRY_AUTH_HTPASSWD_PATH", self.credentials_path) 66 | super().start() 67 | self._copy_credentials() 68 | else: 69 | super().start() 70 | 71 | self._readiness_probe() 72 | return self 73 | 74 | def get_registry(self) -> str: 75 | host: str = self.get_container_host_ip() 76 | port: str = self.get_exposed_port(self.port) 77 | return f"{host}:{port}" 78 | -------------------------------------------------------------------------------- /modules/registry/tests/test_registry.py: -------------------------------------------------------------------------------- 1 | from requests import Response, get 2 | from requests.auth import HTTPBasicAuth 3 | from testcontainers.registry import DockerRegistryContainer 4 | 5 | 6 | REGISTRY_USERNAME: str = "foo" 7 | REGISTRY_PASSWORD: str = 
"bar" 8 | 9 | 10 | def test_registry(): 11 | with DockerRegistryContainer().with_bind_ports(5000, 5000) as registry_container: 12 | url: str = f"http://{registry_container.get_registry()}/v2/_catalog" 13 | 14 | response: Response = get(url) 15 | 16 | assert response.status_code == 200 17 | 18 | 19 | def test_registry_with_authentication(): 20 | with DockerRegistryContainer(username=REGISTRY_USERNAME, password=REGISTRY_PASSWORD).with_bind_ports( 21 | 5000, 5000 22 | ) as registry_container: 23 | url: str = f"http://{registry_container.get_registry()}/v2/_catalog" 24 | 25 | response: Response = get(url, auth=HTTPBasicAuth(REGISTRY_USERNAME, REGISTRY_PASSWORD)) 26 | 27 | assert response.status_code == 200 28 | -------------------------------------------------------------------------------- /modules/scylla/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.scylla.ScyllaContainer 2 | .. title:: testcontainers.scylla.ScyllaContainer 3 | -------------------------------------------------------------------------------- /modules/scylla/testcontainers/scylla/__init__.py: -------------------------------------------------------------------------------- 1 | from testcontainers.core.config import MAX_TRIES 2 | from testcontainers.core.generic import DockerContainer 3 | from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs 4 | 5 | 6 | class ScyllaContainer(DockerContainer): 7 | """ 8 | Scylla database container. 9 | 10 | Example 11 | ------- 12 | .. doctest:: 13 | 14 | >>> from testcontainers.scylla import ScyllaContainer 15 | 16 | >>> with ScyllaContainer() as scylla: 17 | ... cluster = scylla.get_cluster() 18 | ... with cluster.connect() as session: 19 | ... result = session.execute( 20 | ... "CREATE KEYSPACE keyspace1 WITH replication " 21 | ... 
"= {'class': 'SimpleStrategy', 'replication_factor': '1'};") 22 | """ 23 | 24 | def __init__(self, image="scylladb/scylla:latest", ports_to_expose=(9042,)): 25 | super().__init__(image) 26 | self.ports_to_expose = ports_to_expose 27 | self.with_exposed_ports(*self.ports_to_expose) 28 | self.with_command("--skip-wait-for-gossip-to-settle=0") 29 | 30 | @wait_container_is_ready(OSError) 31 | def _connect(self): 32 | wait_for_logs(self, predicate="Starting listening for CQL clients", timeout=MAX_TRIES) 33 | cluster = self.get_cluster() 34 | cluster.connect() 35 | 36 | def start(self): 37 | super().start() 38 | self._connect() 39 | return self 40 | 41 | def get_cluster(self, **kwargs): 42 | from cassandra.cluster import Cluster 43 | 44 | hostname = self.get_container_host_ip() 45 | port = self.get_exposed_port(9042) 46 | return Cluster(contact_points=[hostname], port=port, **kwargs) 47 | -------------------------------------------------------------------------------- /modules/scylla/tests/test_scylla.py: -------------------------------------------------------------------------------- 1 | from testcontainers.scylla import ScyllaContainer 2 | 3 | 4 | def test_docker_run_scylla(): 5 | with ScyllaContainer() as scylla: 6 | cluster = scylla.get_cluster() 7 | with cluster.connect() as session: 8 | session.execute( 9 | "CREATE KEYSPACE keyspace1 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'};" 10 | ) 11 | session.execute("CREATE TABLE keyspace1.table1 (key1 int, key2 int, PRIMARY KEY (key1));") 12 | session.execute("INSERT INTO keyspace1.table1 (key1,key2) values (1,2);") 13 | 14 | response = session.execute("SELECT * FROM keyspace1.table1") 15 | 16 | assert response.one().key1 == 1 17 | assert response.one().key2 == 2 18 | -------------------------------------------------------------------------------- /modules/selenium/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.selenium.BrowserWebDriverContainer 2 | .. title:: testcontainers.selenium.BrowserWebDriverContainer 3 | -------------------------------------------------------------------------------- /modules/selenium/testcontainers/selenium/video.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | from typing import Optional 14 | 15 | from testcontainers.core.container import DockerContainer 16 | 17 | VIDEO_DEFAULT_IMAGE = "selenium/video:ffmpeg-6.1-20240402" 18 | 19 | 20 | class SeleniumVideoContainer(DockerContainer): 21 | """ 22 | Selenium video container. 
23 | """ 24 | 25 | def __init__(self, image: Optional[str] = None, **kwargs) -> None: 26 | self.image = image or VIDEO_DEFAULT_IMAGE 27 | super().__init__(image=self.image, **kwargs) 28 | 29 | def set_video_name(self, video_name: str) -> "DockerContainer": 30 | self.with_env("FILE_NAME", video_name) 31 | return self 32 | 33 | def set_videos_host_path(self, host_path: str) -> "DockerContainer": 34 | self.with_volume_mapping(host_path, "/videos", "rw") 35 | return self 36 | 37 | def set_selenium_container_host(self, host: str) -> "DockerContainer": 38 | self.with_env("DISPLAY_CONTAINER_NAME", host) 39 | return self 40 | -------------------------------------------------------------------------------- /modules/selenium/tests/test_selenium.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | from pathlib import Path 4 | 5 | import pytest 6 | from selenium.webdriver import DesiredCapabilities 7 | from selenium.webdriver.common.by import By 8 | 9 | from testcontainers.core.utils import is_arm 10 | from testcontainers.selenium import BrowserWebDriverContainer 11 | 12 | 13 | @pytest.mark.parametrize("caps", [DesiredCapabilities.CHROME, DesiredCapabilities.FIREFOX]) 14 | def test_webdriver_container_container(caps): 15 | if is_arm(): 16 | pytest.skip("https://github.com/SeleniumHQ/docker-selenium/issues/1076") 17 | 18 | with BrowserWebDriverContainer(caps).maybe_emulate_amd64() as chrome: 19 | webdriver = chrome.get_driver() 20 | webdriver.get("http://example.com") 21 | header = webdriver.find_element(By.TAG_NAME, "h1").text 22 | assert header == "Example Domain" 23 | 24 | 25 | def test_selenium_custom_image(): 26 | image = "selenium/standalone-chrome:latest" 27 | chrome = BrowserWebDriverContainer(DesiredCapabilities.CHROME, image=image) 28 | assert "image" in dir(chrome), "`image` attribute was not instantialized." 29 | assert chrome.image == image, "`image` attribute was not set to the user provided value" 30 | 31 | 32 | @pytest.mark.parametrize("caps", [DesiredCapabilities.CHROME, DesiredCapabilities.FIREFOX]) 33 | def test_selenium_video(caps, workdir): 34 | video_path = workdir / Path("video.mp4") 35 | with BrowserWebDriverContainer(caps).with_video(video_path=video_path) as chrome: 36 | chrome.get_driver().get("https://google.com") 37 | 38 | assert video_path.exists(), "Selenium video file does not exist" 39 | 40 | 41 | @pytest.fixture 42 | def workdir() -> Path: 43 | tmpdir = tempfile.TemporaryDirectory() 44 | # Enable write permissions for the Docker user container. 45 | os.chmod(tmpdir.name, 0o777) 46 | yield Path(tmpdir.name) 47 | tmpdir.cleanup() 48 | -------------------------------------------------------------------------------- /modules/sftp/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.sftp.SFTPContainer 2 | .. autoclass:: testcontainers.sftp.SFTPUser 3 | .. 
title:: testcontainers.sftp.SFTPContainer 4 | -------------------------------------------------------------------------------- /modules/sftp/testcontainers/sftp/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/testcontainers/testcontainers-python/5f34ad0e934a83b49c14b5b0d63284448eac1940/modules/sftp/testcontainers/sftp/py.typed -------------------------------------------------------------------------------- /modules/test_module_import/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.test_module_import.NewSubModuleContainer 2 | .. title:: testcontainers.test_module_import.NewSubModuleContainer 3 | -------------------------------------------------------------------------------- /modules/test_module_import/testcontainers/test_module_import/__init__.py: -------------------------------------------------------------------------------- 1 | from .new_sub_module import NewSubModuleContainer # noqa: F401 2 | -------------------------------------------------------------------------------- /modules/test_module_import/testcontainers/test_module_import/new_sub_module.py: -------------------------------------------------------------------------------- 1 | from testcontainers.generic.server import ServerContainer 2 | 3 | 4 | class NewSubModuleContainer(ServerContainer): 5 | """ 6 | This class is a mock container for testing purposes. It is used to test importing from other modules. 7 | 8 | .. doctest:: 9 | 10 | >>> import httpx 11 | >>> from testcontainers.core.image import DockerImage 12 | >>> from testcontainers.test_module_import import NewSubModuleContainer 13 | 14 | >>> with DockerImage(path="./modules/generic/tests/samples/python_server", tag="test-new-mod:latest") as image: 15 | ... with NewSubModuleContainer(port=9000, image=image) as new_mod: 16 | ... url = new_mod._create_connection_url() 17 | ... response = httpx.get(f"{url}", timeout=5) 18 | ... assert response.status_code == 200, "Response status code is not 200" 19 | ... assert new_mod.additional_capability() == "NewSubModuleContainer" 20 | 21 | """ 22 | 23 | def __init__(self, port: int, image: str) -> None: 24 | super().__init__(port, image) 25 | 26 | def additional_capability(self) -> str: 27 | return "NewSubModuleContainer" 28 | -------------------------------------------------------------------------------- /modules/test_module_import/tests/test_mock_one.py: -------------------------------------------------------------------------------- 1 | import httpx 2 | 3 | from testcontainers.core.waiting_utils import wait_for_logs 4 | from testcontainers.core.image import DockerImage 5 | from testcontainers.test_module_import import NewSubModuleContainer 6 | 7 | 8 | def test_like_doctest(): 9 | with DockerImage(path="./modules/generic/tests/samples/python_server", tag="test-new-mod:latest") as image: 10 | with NewSubModuleContainer(port=9000, image=image) as new_mod: 11 | url = new_mod._create_connection_url() 12 | response = httpx.get(f"{url}", timeout=5) 13 | assert response.status_code == 200, "Response status code is not 200" 14 | assert new_mod.additional_capability() == "NewSubModuleContainer" 15 | -------------------------------------------------------------------------------- /modules/trino/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.trino.TrinoContainer 2 | .. 
title:: testcontainers.trino.TrinoContainer 3 | -------------------------------------------------------------------------------- /modules/trino/testcontainers/trino/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | import re 14 | 15 | from testcontainers.core.config import testcontainers_config as c 16 | from testcontainers.core.generic import DbContainer 17 | from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs 18 | from trino.dbapi import connect 19 | 20 | 21 | class TrinoContainer(DbContainer): 22 | def __init__( 23 | self, 24 | image="trinodb/trino:latest", 25 | user: str = "test", 26 | port: int = 8080, 27 | **kwargs, 28 | ): 29 | super().__init__(image=image, **kwargs) 30 | self.user = user 31 | self.port = port 32 | self.with_exposed_ports(self.port) 33 | 34 | @wait_container_is_ready() 35 | def _connect(self) -> None: 36 | wait_for_logs( 37 | self, 38 | re.compile(".*======== SERVER STARTED ========.*", re.MULTILINE).search, 39 | c.max_tries, 40 | c.sleep_time, 41 | ) 42 | conn = connect( 43 | host=self.get_container_host_ip(), 44 | port=self.get_exposed_port(self.port), 45 | user=self.user, 46 | ) 47 | cur = conn.cursor() 48 | cur.execute("SELECT 1") 49 | cur.fetchall() 50 | conn.close() 51 | 52 | def get_connection_url(self): 53 | return f"trino://{self.user}@{self.get_container_host_ip()}:{self.port}" 54 | 55 | def _configure(self): 56 | pass 57 | -------------------------------------------------------------------------------- /modules/trino/tests/test_trino.py: -------------------------------------------------------------------------------- 1 | from testcontainers.trino import TrinoContainer 2 | from trino.dbapi import connect 3 | 4 | 5 | def test_docker_run_trino(): 6 | container = TrinoContainer("trinodb/trino:451") 7 | with container as trino: 8 | conn = connect( 9 | host=trino.get_container_host_ip(), 10 | port=trino.get_exposed_port(trino.port), 11 | user="test", 12 | ) 13 | cur = conn.cursor() 14 | cur.execute("SELECT version()") 15 | rows = cur.fetchall() 16 | assert rows[0][0] == "451" 17 | conn.close() 18 | -------------------------------------------------------------------------------- /modules/vault/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.vault.VaultContainer 2 | .. title:: testcontainers.vault.VaultContainer 3 | -------------------------------------------------------------------------------- /modules/vault/testcontainers/vault/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 3 | # not use this file except in compliance with the License. 
You may obtain 4 | # a copy of the License at 5 | # 6 | # http://www.apache.org/licenses/LICENSE-2.0 7 | # 8 | # Unless required by applicable law or agreed to in writing, software 9 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 10 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 11 | # License for the specific language governing permissions and limitations 12 | # under the License. 13 | 14 | from http.client import HTTPException 15 | from urllib.error import URLError 16 | from urllib.request import urlopen 17 | 18 | from testcontainers.core.container import DockerContainer 19 | from testcontainers.core.waiting_utils import wait_container_is_ready 20 | 21 | 22 | class VaultContainer(DockerContainer): 23 | """ 24 | Vault container. 25 | 26 | Example: 27 | 28 | .. doctest:: 29 | 30 | >>> from testcontainers.vault import VaultContainer 31 | >>> import hvac 32 | 33 | >>> with VaultContainer("hashicorp/vault:1.16.1") as vault_container: 34 | ... connection_url = vault_container.get_connection_url() 35 | ... client = hvac.Client(url=connection_url, token=vault_container.root_token) 36 | ... assert client.is_authenticated() 37 | ... # use root client to perform desired actions, e.g. 38 | ... policies = client.sys.list_acl_policies() 39 | """ 40 | 41 | def __init__( 42 | self, 43 | image: str = "hashicorp/vault:latest", 44 | port: int = 8200, 45 | root_token: str = "toor", 46 | **kwargs, 47 | ) -> None: 48 | super().__init__(image, **kwargs) 49 | self.port = port 50 | self.root_token = root_token 51 | self.with_exposed_ports(self.port) 52 | self.with_env("VAULT_DEV_ROOT_TOKEN_ID", self.root_token) 53 | 54 | def get_connection_url(self) -> str: 55 | """ 56 | Get the connection URL used to connect to the Vault container. 57 | 58 | Returns: 59 | str: The address to connect to. 
60 | """ 61 | host_ip = self.get_container_host_ip() 62 | exposed_port = self.get_exposed_port(self.port) 63 | return f"http://{host_ip}:{exposed_port}" 64 | 65 | @wait_container_is_ready(HTTPException, URLError) 66 | def _healthcheck(self) -> None: 67 | url = f"{self.get_connection_url()}/v1/sys/health" 68 | with urlopen(url) as res: 69 | if res.status > 299: 70 | raise HTTPException() 71 | 72 | def start(self) -> "VaultContainer": 73 | super().start() 74 | self._healthcheck() 75 | return self 76 | -------------------------------------------------------------------------------- /modules/vault/tests/test_vault.py: -------------------------------------------------------------------------------- 1 | import hvac 2 | from testcontainers.vault import VaultContainer 3 | 4 | 5 | def test_docker_run_vault(): 6 | config = VaultContainer("hashicorp/vault:1.16.1") 7 | with config as vault: 8 | url = vault.get_connection_url() 9 | client = hvac.Client(url=url) 10 | status = client.sys.read_health_status() 11 | assert status.status_code == 200 12 | 13 | 14 | def test_docker_run_vault_act_as_root(): 15 | config = VaultContainer("hashicorp/vault:1.16.1") 16 | with config as vault: 17 | url = vault.get_connection_url() 18 | client = hvac.Client(url=url, token=vault.root_token) 19 | assert client.is_authenticated() 20 | assert client.sys.is_initialized() 21 | assert not client.sys.is_sealed() 22 | 23 | client.sys.enable_secrets_engine( 24 | backend_type="kv", 25 | path="secrets", 26 | config={ 27 | "version": "2", 28 | }, 29 | ) 30 | client.secrets.kv.v2.create_or_update_secret( 31 | path="my-secret", 32 | mount_point="secrets", 33 | secret={ 34 | "pssst": "this is secret", 35 | }, 36 | ) 37 | resp = client.secrets.kv.v2.read_secret( 38 | path="my-secret", 39 | mount_point="secrets", 40 | ) 41 | assert resp["data"]["data"]["pssst"] == "this is secret" 42 | -------------------------------------------------------------------------------- /modules/weaviate/README.rst: -------------------------------------------------------------------------------- 1 | .. autoclass:: testcontainers.weaviate.WeaviateContainer 2 | .. 
title:: testcontainers.weaviate.WeaviateContainer 3 | -------------------------------------------------------------------------------- /modules/weaviate/tests/test_weaviate.py: -------------------------------------------------------------------------------- 1 | from testcontainers.weaviate import WeaviateContainer 2 | import weaviate 3 | 4 | 5 | def test_docker_run_weaviate(): 6 | with WeaviateContainer() as container: 7 | client = weaviate.connect_to_custom( 8 | http_host=container.get_http_host(), 9 | http_port=container.get_http_port(), 10 | http_secure=container.get_http_secure(), 11 | grpc_host=container.get_grpc_host(), 12 | grpc_port=container.get_grpc_port(), 13 | grpc_secure=container.get_grpc_secure(), 14 | ) 15 | 16 | meta = client.get_meta() 17 | assert len(meta.get("version")) > 0 18 | 19 | client.close() 20 | 21 | 22 | def test_docker_run_weaviate_with_client(): 23 | with WeaviateContainer() as container: 24 | with container.get_client() as client: 25 | assert client.is_live() 26 | 27 | meta = client.get_meta() 28 | assert len(meta.get("version")) > 0 29 | 30 | 31 | def test_docker_run_weaviate_with_modules(): 32 | enable_modules = [ 33 | "backup-filesystem", 34 | "text2vec-openai", 35 | "text2vec-cohere", 36 | "text2vec-huggingface", 37 | "generative-openai", 38 | ] 39 | with WeaviateContainer( 40 | env_vars={ 41 | "ENABLE_MODULES": ",".join(enable_modules), 42 | "BACKUP_FILESYSTEM_PATH": "/tmp/backups", 43 | } 44 | ) as container: 45 | with container.get_client() as client: 46 | assert client.is_live() 47 | 48 | meta = client.get_meta() 49 | assert len(meta.get("version")) > 0 50 | 51 | modules = meta.get("modules") 52 | assert len(modules) == len(enable_modules) 53 | 54 | for name in enable_modules: 55 | assert len(modules[name]) > 0 56 | -------------------------------------------------------------------------------- /scripts/diagnostics.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from testcontainers.core import utils 4 | from testcontainers.core.container import DockerContainer 5 | 6 | result = { 7 | "is_linux": utils.is_linux(), 8 | "is_mac": utils.is_mac(), 9 | "is_windows": utils.is_windows(), 10 | "inside_container": utils.inside_container(), 11 | "default_gateway_ip": utils.default_gateway_ip(), 12 | } 13 | 14 | with DockerContainer("alpine:latest") as container: 15 | client = container.get_docker_client() 16 | result.update( 17 | { 18 | "container_host_ip": container.get_container_host_ip(), 19 | "docker_client_gateway_ip": client.gateway_ip(container._container.id), 20 | "docker_client_bridge_ip": client.bridge_ip(container._container.id), 21 | "docker_client_host": client.host(), 22 | } 23 | ) 24 | 25 | print(json.dumps(result, indent=2)) # noqa: T201 26 | -------------------------------------------------------------------------------- /scripts/mypy_report.py: -------------------------------------------------------------------------------- 1 | # Description: This script reads the output of mypy and generates a summary of errors by file. 
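# It expects mypy's plain-text error lines, i.e. "path/to/file.py:LINE: error: message", on stdin.
# Example invocation (an assumed pipeline, not taken from this repository's tooling; any mypy target works):
#
#     mypy core modules | python scripts/mypy_report.py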
2 | 3 | import re 4 | import sys 5 | 6 | from rich.console import Console 7 | from rich.table import Table 8 | 9 | # Regular expression to match file path and error count 10 | pattern = r"(.*\.py:\d+):\s+error: (.*)" 11 | 12 | error_dict = {} 13 | 14 | for line in sys.stdin: 15 | match = re.search(pattern, line) 16 | if match: 17 | # Extract file path and error message 18 | file_path, _ = match.group(1).split(":") 19 | error_message = match.group(2) 20 | 21 | if file_path not in error_dict: 22 | error_dict[file_path] = 1 23 | else: 24 | error_dict[file_path] += 1 25 | 26 | table = Table(title="Error Summary") 27 | table.add_column("File Path") 28 | table.add_column("Errors", justify="left") 29 | 30 | for file_path, error_count in error_dict.items(): 31 | table.add_row(file_path, str(error_count)) 32 | 33 | console = Console() 34 | console.print(table) 35 | console.print(f"[red]Found {sum(error_dict.values())} errors in {len(error_dict)} files.[/red]") 36 | --------------------------------------------------------------------------------