├── .github
│   ├── release.yml
│   └── workflows
│       ├── pr.yaml
│       ├── python-client-gen.yml
│       └── release.yml
├── .gitignore
├── .markdownlint.yml
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── .vscode
│   └── extensions.json
├── ADR.md
├── CONTRIBUTING.md
├── DO_OPENAPI_COMMIT_SHA.txt
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── client_gen_config.md
├── docs
│   ├── Makefile
│   ├── make.bat
│   └── source
│       ├── conf.py
│       ├── index.rst
│       ├── modules.rst
│       ├── pydo.aio.operations.rst
│       ├── pydo.aio.rst
│       ├── pydo.operations.rst
│       └── pydo.rst
├── examples
│   ├── customize_client_settings
│   │   ├── add_http_logger.py
│   │   ├── custom_endpoint.py
│   │   ├── custom_request_timeout.py
│   │   └── custom_user_agent.py
│   └── poc_droplets_volumes_sshkeys.py
├── package-lock.json
├── package.json
├── poetry.lock
├── pyproject.toml
├── requirements.txt
├── scripts
│   ├── bumpversion.sh
│   ├── openapi_changelist.sh
│   ├── pr_body.md_tmpl
│   └── tag.sh
├── src
│   └── pydo
│       ├── __init__.py
│       ├── _client.py
│       ├── _configuration.py
│       ├── _patch.py
│       ├── _serialization.py
│       ├── _version.py
│       ├── aio
│       │   ├── __init__.py
│       │   ├── _client.py
│       │   ├── _configuration.py
│       │   ├── _patch.py
│       │   └── operations
│       │       ├── __init__.py
│       │       ├── _operations.py
│       │       └── _patch.py
│       ├── custom_policies.py
│       ├── exceptions.py
│       ├── operations
│       │   ├── __init__.py
│       │   ├── _operations.py
│       │   └── _patch.py
│       └── py.typed
└── tests
    ├── __init__.py
    ├── integration
    │   ├── __init__.py
    │   ├── conftest.py
    │   ├── defaults.py
    │   ├── shared.py
    │   ├── test_actions.py
    │   ├── test_app.py
    │   ├── test_app_deployment.py
    │   ├── test_billing.py
    │   ├── test_block_storage.py
    │   ├── test_cdn.py
    │   ├── test_container_registry.py
    │   ├── test_databases.py
    │   ├── test_domains.py
    │   ├── test_droplets.py
    │   ├── test_firewalls.py
    │   ├── test_images.py
    │   ├── test_kubernetes.py
    │   ├── test_load_balancers.py
    │   ├── test_monitoring.py
    │   ├── test_one_clicks.py
    │   ├── test_projects.py
    │   ├── test_regions.py
    │   ├── test_reserved_ips.py
    │   ├── test_sizes.py
    │   ├── test_snapshots.py
    │   ├── test_ssh_keys.py
    │   ├── test_tags.py
    │   └── test_vpcs.py
    └── mocked
        ├── __init__.py
        ├── conftest.py
        ├── data
        │   └── kubernetes_data.py
        ├── test_account.py
        ├── test_actions.py
        ├── test_app.py
        ├── test_app_deployment.py
        ├── test_billing.py
        ├── test_block_storage.py
        ├── test_cdn.py
        ├── test_certificates.py
        ├── test_client_customizations.py
        ├── test_container_registry.py
        ├── test_databases.py
        ├── test_domains.py
        ├── test_droplets.py
        ├── test_firewalls.py
        ├── test_functions.py
        ├── test_image_actions.py
        ├── test_images.py
        ├── test_kubernetes.py
        ├── test_load_balancers.py
        ├── test_monitoring.py
        ├── test_one_click.py
        ├── test_project.py
        ├── test_regions.py
        ├── test_reserved_ips.py
        ├── test_sizes.py
        ├── test_snapshots.py
        ├── test_ssh_keys.py
        ├── test_tags.py
        ├── test_uptime.py
        └── test_vpcs.py
/.github/release.yml:
--------------------------------------------------------------------------------
1 | # Automatic release notes configuration
2 | # https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes
3 |
4 | changelog:
5 | exclude:
6 | labels:
7 | - ignore-for-changelog
8 | categories:
9 | - title: "Breaking Changes :warning:"
10 | labels:
11 | - semver-major
12 | - breaking-change
13 | - title: "Features :tada:"
14 | labels:
15 | - semver-minor
16 | - enhancement
17 | - title: Bug fixes 🐞
18 | labels:
19 | - bug
20 | - title: "Other Changes :hammer_and_pick: (documentation, testing, etc)"
21 | labels:
22 | - "*"
23 |
--------------------------------------------------------------------------------
/.github/workflows/pr.yaml:
--------------------------------------------------------------------------------
1 | name: pr
2 |
3 | on:
4 | pull_request:
5 |
6 | jobs:
7 | lint:
8 | name: Lint
9 | runs-on: ubuntu-latest
10 |
11 | steps:
12 | - name: Check out code
13 | uses: actions/checkout@v2
14 |
15 | - name: Setup python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: "3.10"
19 |
20 | - name: Install Poetry
21 | uses: snok/install-poetry@v1.3.4
22 | with:
23 | version: 1.6.1
24 | virtualenvs-path: .venv
25 | virtualenvs-create: true
26 | virtualenvs-in-project: true
27 | installer-parallel: false
28 |
29 | - name: Restore poetry cache
30 | if: success() && !env.ACT
31 | uses: actions/cache@v3
32 | with:
33 | path: .venv # actions/cache cannot expand $(poetry config cache-dir); the venv is in-project
34 | key: ubuntu-latest-poetry-3.10-${{ hashFiles('**/pyproject.toml') }}
35 |
36 | - name: Lint tests
37 | run: make lint-tests
38 | env:
39 | # --disable=fixme: disables warnings for TODO comments
40 | PYLINT_ARGS: "--disable=fixme"
41 |
42 | - name: Lint docs
43 | uses: avto-dev/markdown-lint@v1
44 | with:
45 | args: "."
46 |
47 | test:
48 | name: Test
49 | runs-on: ubuntu-latest
50 |
51 | needs:
52 | - lint
53 |
54 | strategy:
55 | fail-fast: true
56 | matrix:
57 | python-version:
58 | - "3.8"
59 | - "3.9"
60 | - "3.10"
61 |
62 | steps:
63 | - name: Check out code
64 | uses: actions/checkout@v2
65 |
66 | - name: Setup python
67 | uses: actions/setup-python@v4
68 | with:
69 | python-version: ${{ matrix.python-version }}
70 |
71 | - name: Install Poetry
72 | uses: snok/install-poetry@v1.3.1
73 | with:
74 | version: 1.6.1
75 | virtualenvs-path: .venv
76 | virtualenvs-create: true
77 | virtualenvs-in-project: true
78 | installer-parallel: false
79 |
80 | - name: Restore poetry cache
81 | if: success() && !env.ACT
82 | uses: actions/cache@v3
83 | with:
84 | path: .venv # actions/cache cannot expand $(poetry config cache-dir); the venv is in-project
85 | key: ubuntu-latest-poetry-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml') }}
86 |
87 | - name: Run mocked tests
88 | run: make test-mocked
89 |
--------------------------------------------------------------------------------
/.github/workflows/python-client-gen.yml:
--------------------------------------------------------------------------------
1 | name: Python Client Generation
2 | on:
3 | workflow_dispatch:
4 | inputs:
5 | openapi_short_sha:
6 | description: "The short commit sha that triggered the workflow"
7 | required: true
8 | type: string
9 |
10 | env:
11 | NEW_BRANCH: openapi-${{ github.event.inputs.openapi_short_sha }}/clientgen
12 |
13 | jobs:
14 | Generate-Python-Client:
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - uses: actions/checkout@v3
19 |
20 | - name: OpenAPI Changelist
21 | env:
22 | GH_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
23 | run: |
24 | current_sha=$(cat DO_OPENAPI_COMMIT_SHA.txt)
25 | echo "current_sha=$current_sha" >> $GITHUB_ENV
26 | target_sha=${{ github.event.inputs.openapi_short_sha }}
27 | scripts/openapi_changelist.sh $current_sha $target_sha > changelist.md
28 |
29 | - name: Remove all generated code
30 | run: make clean
31 |
32 | - name: Download spec file and Update DO_OPENAPI_COMMIT_SHA.txt
33 | run: |
34 | curl --fail https://api-engineering.nyc3.digitaloceanspaces.com/spec-ci/DigitalOcean-public-${{ github.event.inputs.openapi_short_sha }}.v2.yaml -o DigitalOcean-public.v2.yaml
35 | echo ${{ github.event.inputs.openapi_short_sha }} > DO_OPENAPI_COMMIT_SHA.txt
36 | env:
37 | GH_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
38 |
39 | - uses: actions/upload-artifact@v4
40 | with:
41 | name: DigitalOcean-public.v2
42 | path: ./DigitalOcean-public.v2.yaml
43 |
44 | - name: Checkout new Branch
45 | run: git checkout -b ${{ env.NEW_BRANCH }}
46 | env:
47 | GH_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
48 |
49 | - name: Install Poetry
50 | uses: snok/install-poetry@v1.3.4
51 | with:
52 | version: 1.6.1
53 | virtualenvs-path: .venv
54 | virtualenvs-create: true
55 | virtualenvs-in-project: true
56 | installer-parallel: false
57 |
58 | - name: Generate Python client
59 | run: make generate
60 |
61 | - name: Generate Python client documentation
62 | run: make generate-docs
63 |
64 | - name: Add and commit changes
65 | id: add-commit-changes
66 | continue-on-error: true
67 | run: |
68 | git config --global user.email "api-engineering@digitalocean.com"
69 | git config --global user.name "API Engineering"
70 | git add .
71 | git commit -m "[bot] Updated client based on ${{ env.NEW_BRANCH }}"
72 | git push --set-upstream origin ${{ env.NEW_BRANCH }}
73 | env:
74 | GH_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
75 |
76 | - name: Check if branch existed
77 | # If steps.add-commit-changes was anything but successful, it's possible that
78 | # the branch was created in a previous pipeline run, so it should be manually deleted.
79 | if: steps.add-commit-changes.outcome != 'success'
80 | run: |
81 | echo "Add and commit changes step failed. It's possible the branch ${{ env.NEW_BRANCH }} already existed. Please delete the branch and re-run the pipeline."
82 | exit 1
83 |
84 | - name: Set pr_title outputs
85 | id: pr_title_var
86 | run: echo "PR_TITLE=$(gh pr list --search "${{ github.event.inputs.openapi_short_sha }}" --json title --jq '.[0].title' --repo digitalocean/openapi --state merged)" >> $GITHUB_OUTPUT
87 | env:
88 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
89 | - name: Check pr_title outputs
90 | run: echo "${{ steps.pr_title_var.outputs.PR_TITLE }}"
91 |
92 | - name: Create Pull Request
93 | id: create-pr
94 | if: steps.add-commit-changes.outcome == 'success'
95 | run: |
96 | export CURRENT="$(cat DO_OPENAPI_COMMIT_SHA.txt)"
97 | export TARGET="${{ github.event.inputs.openapi_short_sha }}"
98 | export TITLE="${{ steps.pr_title_var.outputs.PR_TITLE }}"
99 | envsubst < scripts/pr_body.md_tmpl > pr_body.md
100 | cat changelist.md >> pr_body.md
101 |
102 | echo "PR BODY:"
103 | cat pr_body.md
104 |
105 | gh pr create \
106 | --title "[bot] $TITLE: Re-Generated From digitalocean/openapi@$TARGET" \
107 | --body-file pr_body.md \
108 | --head "${{ env.NEW_BRANCH }}" \
109 | -r digitalocean/api-cli
110 | env:
111 | GH_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
112 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: tagged-release
2 |
3 | on:
4 | push:
5 | tags:
6 | - "v[0-9].[0-9]+.[0-9]+"
7 |
8 | jobs:
9 | build-and-publish:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout
13 | uses: actions/checkout@v2.5.0
14 |
15 | - name: Setup python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: "3.10"
19 |
20 | - name: Install Poetry
21 | uses: snok/install-poetry@v1.3.4
22 | with:
23 | version: 1.6.1
24 | virtualenvs-path: .venv
25 | virtualenvs-create: true
26 | virtualenvs-in-project: true
27 | installer-parallel: false
28 |
29 | - name: Restore poetry cache
30 | if: success() && !env.ACT
31 | uses: actions/cache@v3
32 | with:
33 | path: .venv # actions/cache cannot expand $(poetry config cache-dir); the venv is in-project
34 | key: ubuntu-latest-poetry-3.10-${{ hashFiles('**/pyproject.toml') }}
35 |
36 | - name: Build
37 | run: poetry build
38 |
39 | - name: Upload Build Artifacts
40 | uses: actions/upload-artifact@v4
41 | with:
42 | name: dist
43 | path: dist/
44 |
45 | - name: Publish
46 | run: poetry publish
47 | env:
48 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
49 |
50 | release:
51 | runs-on: ubuntu-latest
52 |
53 | needs: build-and-publish
54 |
55 | steps:
56 | - name: Checkout
57 | uses: actions/checkout@v2.5.0
58 |
59 | - name: Download the Build Artifacts
60 | uses: actions/download-artifact@v4
61 | with:
62 | name: dist
63 | path: dist/
64 |
65 | - name: Set TAG
66 | run: echo "TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
67 |
68 | - name: Create release
69 | run: |
70 | gh release create ${{ env.TAG }} \
71 | dist/* \
72 | --title "${{ env.TAG }}" \
73 | --generate-notes \
74 | --draft
75 | env:
76 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
77 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | venv/
5 | .vscode/
6 | !.vscode/extensions.json
7 | .idea/
8 | .env
9 | *.log
10 | *.swp
11 |
12 | # Python
13 | .python-version
14 | __pycache__/
15 | *.pyc
16 | build
17 | dist
18 | pydo.egg-info
19 |
20 | # client gen
21 | DigitalOcean-public.v2.yaml
22 | node_modules/
23 | testscript.py
24 | changelist.md
25 |
--------------------------------------------------------------------------------
/.markdownlint.yml:
--------------------------------------------------------------------------------
1 | MD013: false
2 |
3 | MD025: false
4 |
5 | MD033: false
6 |
7 | MD024: false
8 |
9 | MD001: false
10 |
11 | MD046:
12 | style: fenced
13 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/psf/black
3 | rev: 22.10.0
4 | hooks:
5 | - id: black
6 | args: ["tests"]
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-22.04
5 | tools:
6 | python: "3.8"
7 | jobs:
8 | # need to change references of GeneratedClient to Client to avoid confusion
9 | pre_build:
10 | - echo "find . -type f -name 'index.html' | xargs sed -i '' 's/GeneratedClient/Client/g'"
11 |
12 | sphinx:
13 | configuration: docs/source/conf.py
14 |
15 | # Optionally declare the Python requirements required to build your docs
16 | python:
17 | install:
18 | - requirements: requirements.txt
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "ms-python.python",
4 | "cameron.vscode-pytest"
5 | ]
6 | }
--------------------------------------------------------------------------------
/ADR.md:
--------------------------------------------------------------------------------
1 | # Architecture Decision Record for Pydo
2 |
3 | ## Title: Client to be Regenerated on digitalocean/openapi Commit instead of Release
4 |
5 | ## Date: 10/31/2022
6 |
7 | ## Description
8 |
9 | The python-client-gen workflow will be triggered by a digitalocean/openapi commit instead of a digitalocean/openapi tagged release.
10 |
11 | ## Additional Context
12 |
13 | We decided not to support releases in the digitalocean/openapi repo because it would be too much of a maintenance burden for that repo. Therefore, to keep Pydo and our DO spec up to date with each other, we will trigger the workflow on a commit to digitalocean/openapi's main branch.
14 |
15 | ## Title: Remove Third-Party Create PR Github Actions Step
16 |
17 | ## Date: 10/31/2022
18 |
19 | ## Description
20 |
21 | We will be removing the use of peter-evans/create-pull-request@v4 from our workflow because pull requests it created did not trigger our PR check workflows.
22 |
23 | ## Additional Context
24 |
25 | The peter-evans/create-pull-request@v4 doc [here](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs) mentions that pull requests created by the action using the default GITHUB_TOKEN cannot trigger other workflows.
26 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to the DO Python Client
2 |
3 | >First: if you're unsure or afraid of anything, just ask or submit the issue or pull request anyway. You won't be yelled at for giving your best effort. The worst that can happen is that you'll be politely asked to change something. We appreciate all contributions!
4 |
5 | ## A Little Bit of Context
6 |
7 | The DigitalOcean Python client is generated using [AutoRest](https://github.com/Azure/autorest). The AutoRest tool generates client libraries for accessing RESTful web services. Input to AutoRest is a spec that describes the DigitalOcean REST API using the OpenAPI 3.0 Specification format. The spec can be found [here](https://github.com/digitalocean/openapi). AutoRest allows customizations to be made on top of the generated code.
8 |
9 | ## Generating PyDo Locally
10 |
11 | PyDo is a generated client. This section will walk you through generating the client locally. One might ask, when would one want to generate PyDo locally? Local generation is really helpful when making changes to the client configuration itself. It is good practice to re-generate the client to ensure the behavior is as expected.
12 |
13 | ### Prerequisites
14 |
15 | * Python version: >= 3.9 (it's highly recommended to use a python version management tool)
16 | * [poetry](https://python-poetry.org/): python packaging and dependency management
17 | * [AutoRest](https://github.com/Azure/autorest): The tool that generates the client libraries for accessing RESTful web services.
18 |
19 | ### Optional but highly recommended
20 |
21 | We chose not to tie this repository to tooling for managing python installations. This allows developers to use their preferred tooling.
22 |
23 | We like these:
24 |
25 | * [pyenv](https://github.com/pyenv/pyenv): python version management
26 | * [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv):
27 | a pyenv plugin to enable pyenv to manage virtualenvs for development
28 | environment isolation
29 |
30 | ### Setup
31 |
32 | 1. Clone this repository. Run:
33 |
34 | ```sh
35 | git clone git@github.com:digitalocean/pydo.git
36 | cd pydo
37 | ```
38 |
39 | 2. (Optional) Ensure you have the right version of python installed using your preferred python version manager. This is what you'd run if you used `pyenv`:
40 |
41 | ```sh
42 | pyenv install 3.9.4
43 | ```
44 |
45 | This can take a few minutes.
46 |
47 | 3. Install the package dependencies
48 |
49 | ```sh
50 | poetry install
51 | ```
52 |
53 | 4. You can now activate your virtual environment
54 |
55 | ```sh
56 | poetry shell
57 | ```
58 |
59 | ### Using `make` commands to re-generate the client
60 |
61 | 1. Remove the previous generated code.
62 |
63 | ```sh
64 | make clean
65 | ```
66 |
67 | 2. Re-download the latest DO OpenAPI 3.0 Specification.
68 |
69 | ```sh
70 | make download-spec
71 | ```
72 |
73 | 3. Generate the client
74 |
75 | ```sh
76 | make generate
77 | ```
78 |
79 | It is also good practice to run mock tests against the changes using the following make command:
80 |
81 | ```sh
82 | make test-mocked
83 | ```
84 |
85 | ## Customizing the Client Using Patch Files
86 |
87 | On top of generating our client, we've added a few customizations to create an optimal user experience. These customizations can be made by editing the `_patch.py` files. To learn more about adding customizations, please follow AutoRest's documentation [here](https://github.com/Azure/autorest.python/blob/autorestv3/docs/customizations.md).
88 |
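A minimal sketch of the `_patch.py` convention (the `__all__` list and `patch_sdk` hook are AutoRest's extension points; the body below is illustrative, not the repo's actual patch code):

```python
# src/pydo/_patch.py -- customizations layered on top of the generated client.
from typing import List

# Names listed here are re-exported in place of their generated counterparts.
__all__: List[str] = []


def patch_sdk() -> None:
    """Hook called by the generated package at import time.

    Put runtime tweaks to the generated client here so they survive
    re-generation: `make clean` removes everything except _patch.py files.
    """
```
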
89 | ## Releasing `pydo`
90 |
91 | The repo uses GitHub workflows to publish a draft release when a new tag is
92 | pushed. We use [semver](https://semver.org/#summary) to determine the version
93 | number for the tag.
94 |
95 | 1. Run `make changes` to review the merged PRs since last release and decide what kind of release you are doing (bugfix, feature or breaking).
96 | * Review the tags on each PR and make sure they are categorized
97 | appropriately.
98 |
99 | 1. Run `BUMP=(bugfix|feature|breaking) make bump_version` to update the `pydo`
100 | version.
101 | `BUMP` also accepts `(patch|minor|major)`
102 |
103 | Command example:
104 |
105 | ```bash
106 | make BUMP=minor bump_version
107 | ```
108 |
109 | 1. Update the Pydo version in `pyproject.toml` and `src/pydo/_version.py`. Create a separate PR with only these changes.
110 |
111 | 1. Once the PR has been pushed and merged, tag the commit to trigger the
112 | release workflow: run `make tag` to tag the latest commit and push the tag to ORIGIN.
113 |
114 | Notes:
115 | * To tag an earlier commit, run `COMMIT=${commit} make tag`.
116 | * To push the tag to a different remote, run `ORIGIN=${REMOTE} make tag`.
117 |
118 | 1. Once the release process completes, review the draft release for correctness and publish the release.
119 | Ensure the release has been marked `Latest`.
120 |
--------------------------------------------------------------------------------
/DO_OPENAPI_COMMIT_SHA.txt:
--------------------------------------------------------------------------------
1 | 5e8a280
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM fnndsc/python-poetry
2 | COPY pyproject.toml ./
3 | COPY src ./src
4 |
5 | RUN poetry install
6 |
7 | ENTRYPOINT ["poetry", "run"]
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | LOCAL_SPEC_FILE=./DigitalOcean-public.v2.yaml
2 | MODELERFOUR_VERSION="4.23.6"
3 | AUTOREST_PYTHON_VERSION="6.13.16"
4 | POETRY_VERSION:=$(shell poetry version)
5 | PACKAGE_VERSION:=$(lastword $(POETRY_VERSION))
6 | ROOT_DIR := $(dir $(realpath $(lastword $(MAKEFILE_LIST))))
7 | ORIGIN ?= origin
8 | BUMP ?= patch
9 |
10 | ifeq (, $(findstring -m,$(PYTEST_ARGS)))
11 | PYTEST_EXCLUDE_MARKS=-m "not real_billing"
12 | endif
13 |
14 | .PHONY: help
15 | help:
16 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'; \
17 | printf "\nNOTE: Run 'SPEC_FILE=path/to/local/spec make generate' to skip the download and use a local spec file.\n"
18 |
19 | .PHONY: dev-dependencies
20 | dev-dependencies: ## Install development tooling
21 | npm install --only=dev
22 |
23 | .PHONY: clean
24 | clean: ## Removes all generated code (except _patch.py files)
25 | @printf "=== Cleaning src directory\n"
26 | @find src/pydo -type f ! -name "_patch.py" ! -name "custom_*.py" ! -name "exceptions.py" -exec rm -rf {} +
27 |
28 | .PHONY: download-spec
29 | download-spec: ## Download Latest DO Spec
30 | @echo Downloading published spec; \
31 | touch DigitalOcean-public.v2.yaml && \
32 | curl https://api-engineering.nyc3.digitaloceanspaces.com/spec-ci/DigitalOcean-public.v2.yaml -o $(LOCAL_SPEC_FILE)
33 |
34 | .PHONY: generate
35 | ifndef SPEC_FILE
36 | generate: SPEC_FILE = $(LOCAL_SPEC_FILE)
37 | generate: dev-dependencies download-spec ## Generates the python client using the latest published spec first.
38 | endif
39 | generate: install clean dev-dependencies
40 | @printf "=== Generating client with spec: $(SPEC_FILE)\n\n"; \
41 | npm run autorest -- client_gen_config.md \
42 | --use:@autorest/modelerfour@$(MODELERFOUR_VERSION) \
43 | --use:@autorest/python@$(AUTOREST_PYTHON_VERSION) \
44 | --package-version=$(PACKAGE_VERSION) \
45 | --input-file=$(SPEC_FILE)
46 | @poetry run black src
47 |
48 | .PHONY: install
49 | install: ## Install test dependencies
50 | ifneq (, $(shell which poetry))
51 | poetry install --no-interaction -E aio
52 | else
53 | @(echo "poetry is not installed. See https://python-poetry.org/docs/#installation for more info."; exit 1)
54 | endif
55 |
56 | .PHONY: dev
57 | dev: dev-dependencies ## Turns the current terminal into a poetry env
58 | poetry shell
59 |
60 | .PHONY: lint-tests
61 | lint-tests: install ## Lints the code
62 | poetry run black --check --diff tests/. && \
63 | poetry run pylint $(PYLINT_ARGS) tests/.
64 |
65 | .PHONY: test-mocked
66 | test-mocked: install ## Runs the mock test suite
67 | poetry run pytest -rA --tb=short tests/mocked/. $(PYTEST_ARGS)
68 |
69 | .PHONY: test-integration
70 | test-integration: install ## Runs the integration test suite
71 | poetry run pytest -rA --tb=short tests/integration/. $(PYTEST_EXCLUDE_MARKS) $(PYTEST_ARGS)
72 |
73 | .PHONY: test-integration-single
74 | test-integration-single: install ## This command runs a single integration test, e.g. > make test-integration-single test=test_actions
75 | poetry run pytest -rA --tb=short tests/integration/. -k $(test)
76 |
77 | .PHONY: docker-build
78 | docker-build:
79 | docker build -t pydo:dev .
80 |
81 | .PHONY: docker-python
82 | docker-python: docker-build ## Runs a python shell within a docker container
83 | docker run -it --rm --name pydo pydo:dev python
84 |
85 | .PHONY: lint-docs
86 | lint-docs:
87 | docker run -v $(ROOT_DIR):/workdir ghcr.io/igorshubovych/markdownlint-cli:latest "*.md"
88 |
89 | .PHONY: generate-docs
90 | generate-docs: install ## readthedocs requires a requirements.txt file; this converts the poetry file to requirements.txt before re-generating the docs
91 | @echo Generating documentation...;
92 | @echo Converting poetry file to requirements.txt...;
93 | poetry export -f requirements.txt -o requirements.txt --without-hashes --with dev
94 | cd docs && \
95 | poetry run sphinx-apidoc -o source/ ../src/pydo && \
96 | poetry run make html
97 |
98 | .PHONY: clean-docs
99 | clean-docs: ## Delete everything in docs/build/html
100 | cd docs && \
101 | poetry run make clean
102 |
103 | .PHONY: _install_github_release_notes
104 | _install_github_release_notes:
105 | @GO111MODULE=off go get -u github.com/digitalocean/github-changelog-generator
106 |
107 | .PHONY: changes
108 | changes: _install_github_release_notes
109 | @echo "==> Merged PRs since last release"
110 | @echo ""
111 | @github-changelog-generator -org digitalocean -repo pydo
112 |
113 | .PHONY: version
114 | version:
115 | @poetry version
116 |
117 | .PHONY: bump_version
118 | bump_version: ## Bumps the version
119 | @echo "==> BUMP=${BUMP} bump_version"
120 | @echo ""
121 | @ORIGIN=${ORIGIN} scripts/bumpversion.sh
122 |
123 | .PHONY: tag
124 | tag: ## Tags a release
125 | @echo "==> ORIGIN=${ORIGIN} COMMIT=${COMMIT} tag"
126 | @echo ""
127 | @ORIGIN=${ORIGIN} scripts/tag.sh
128 |
--------------------------------------------------------------------------------
/client_gen_config.md:
--------------------------------------------------------------------------------
1 | # Autorest config
2 |
3 | ```yaml
4 | title: DigitalOceanClient
5 | namespace: pydo
6 | python: true
7 | black: true
8 | output-folder: src/
9 | verbose: true
10 | version-tolerant: true
11 | override-client-name: GeneratedClient
12 |
13 | add-credential: true
14 | credential-default-policy-type: BearerTokenCredentialPolicy
15 | credential-scopes: https://api.digitalocean.com
16 |
17 | directive:
18 | - from: openapi-document
19 | where: '$.components.parameters[*]'
20 | transform: >
21 | $["x-ms-parameter-location"] = "method";
22 |
23 | # Floating IP operations have been deprecated in favor of reserved IPs.
24 | - remove-operation: floatingIPs_get
25 | - remove-operation: floatingIPs_create
26 | - remove-operation: floatingIPs_list
27 | - remove-operation: floatingIPs_delete
28 | - remove-operation: floatingIPsAction_get
29 | - remove-operation: floatingIPsAction_list
30 | - remove-operation: floatingIPsAction_post
31 |
32 | - from: openapi-document
33 | where: '$.components.responses.unauthorized'
34 | transform: >
35 | $["x-ms-error-response"] = true;
36 | - from: openapi-document
37 | where: '$.components.responses.too_many_requests'
38 | transform: >
39 | $["x-ms-error-response"] = true;
40 | - from: openapi-document
41 | where: '$.components.responses.server_error'
42 | transform: >
43 | $["x-ms-error-response"] = true;
44 | - from: openapi-document
45 | where: '$.components.responses.unexpected_error'
46 | transform: >
47 | $["x-ms-error-response"] = true;
48 |
49 | - from: openapi-document
50 | where: '$..["log_line_prefix"]'
51 | transform: >
52 | $["x-ms-enum"] = {
53 | "name": "PostfixLogLinePrefix",
54 | "modelAsString": false,
55 | "values": [
56 | {
57 | "value": "pid=%p,user=%u,db=%d,app=%a,client=%h",
58 | "name": "First Option"
59 | },
60 | {
61 | "value": "%m [%p] %q[user=%u,db=%d,app=%a]",
62 | "name": "Second Option"
63 | },
64 | {
65 | "value": "%t [%p]: [%l-1] user=%u,db=%d,app=%a,client=%h",
66 | "name": "Third Option"
67 | }
68 | ]
69 | };
70 |
71 | # Remove accept and content-type arguments from the app.Create method.
72 | - from: openapi-document
73 | where: '$.paths."/v2/apps".post'
74 | transform: >
75 | $["parameters"] = [];
76 | ```
77 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | %SPHINXBUILD% >NUL 2>NUL
14 | if errorlevel 9009 (
15 | echo.
16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17 | echo.installed, then set the SPHINXBUILD environment variable to point
18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
19 | echo.may add the Sphinx directory to PATH.
20 | echo.
21 | echo.If you don't have Sphinx installed, grab it from
22 | echo.https://www.sphinx-doc.org/
23 | exit /b 1
24 | )
25 |
26 | if "%1" == "" goto help
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # For the full list of built-in configuration values, see the documentation:
4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
5 |
6 | # -- Project information -----------------------------------------------------
7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
8 | import os
9 | import sys
10 | # This allows sphinx to locate the client source code.
11 | sys.path.insert(0, os.path.abspath('../../src/'))
12 |
13 | project = 'PyDo'
14 | copyright = '2022, DigitalOcean'
15 | author = 'DigitalOcean'
16 | release = '1.0.0'
17 |
18 | # -- General configuration ---------------------------------------------------
19 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
20 | # This extension generates the documentation
21 | extensions = ['sphinx.ext.autodoc']
22 |
23 | templates_path = ['_templates']
24 | exclude_patterns = []
25 |
26 |
27 |
28 | # -- Options for HTML output -------------------------------------------------
29 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
30 | # This configuration determines the style of the html page
31 | html_theme = 'sphinx_rtd_theme'
32 | html_static_path = ['_static']
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. PyDo documentation master file, created by
2 | sphinx-quickstart on Mon Nov 7 12:26:30 2022.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | ##############################################
7 | :mod:`PyDo` --- DigitalOcean's Python library
8 | ##############################################
9 |
10 | .. module:: pydo
11 |
12 | :mod:`pydo` is a Python client library for DigitalOcean's `HTTP API
13 | `_.
14 |
15 | Installation
16 | ============
17 |
18 | Install from PyPI::
19 |
20 | pip install pydo
21 |
22 |
23 | Initialization
24 | ==============
25 |
26 | :mod:`pydo` must be initialized with :class:`pydo.Client`. A
27 | DigitalOcean API token is required. The token can be passed explicitly to :class:`pydo.Client` or defined in the environment variable
28 | ``DIGITALOCEAN_TOKEN``.
29 |
30 | Here's an example of initializing the PyDo Client::
31 |
32 | from pydo import Client
33 |
34 | client = Client(token="")
35 |
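The token can also be read from the environment and passed in explicitly, as the examples in this repository do (a minimal sketch)::

    import os

    from pydo import Client

    client = Client(token=os.environ.get("DIGITALOCEAN_TOKEN"))
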
36 | .. autofunction:: pydo.Client
37 |
38 | Example
39 | ===========
40 | Below is a working example that lists SSH keys (`per this HTTP request
41 | `_) and prints the ID, name, and fingerprint associated with each key. To try out this quick example, first add an SSH key to your DO account::
42 |
43 | from pydo import Client
44 |
45 | client = Client(token="")
46 |
47 | ssh_keys_resp = client.ssh_keys.list()
48 | for k in ssh_keys_resp["ssh_keys"]:
49 | print(f"ID: {k['id']}, NAME: {k['name']}, FINGERPRINT: {k['fingerprint']}")
50 |
51 | The above code snippet should output the following::
52 |
53 | ID: 123456, NAME: my_test_ssh_key, FINGERPRINT: 5c:74:7e:60:28:69:34:ca:dd:74:67:c3:f3:00:7f:fe
54 | ID: 123457, NAME: my_prod_ssh_key, FINGERPRINT: eb:76:c7:2a:d3:3e:80:5d:ef:2e:ca:86:d7:79:94:0d
55 |
56 | You can find a more thorough example of using the PyDo client `here
57 | `_.
58 | The example walks through the process of creating a droplet with a specified ssh key, creating a volume, and then attaching the volume to the droplet.
59 |
60 | Pagination
61 | ~~~~~~~~~~~
62 | Below is an example of handling pagination. One must parse the URL (here with ``urllib.parse``) to find the next page::
63 |
64 | resp = client.ssh_keys.list(per_page=50, page=page)
65 | pages = resp.links.pages
66 | if 'next' in pages.keys():
67 | parsed_url = urlparse(pages['next'])
68 | page = parse_qs(parsed_url.query)['page'][0]
69 | else:
70 | paginated = False
71 |
72 |
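Asynchronous Usage
~~~~~~~~~~~~~~~~~~
The client is also generated with async support under :mod:`pydo.aio` (install the ``aio`` extra, which pulls in ``aiohttp``). A minimal sketch, assuming a valid token in ``DIGITALOCEAN_TOKEN``::

    import asyncio
    import os

    from pydo.aio import Client

    async def main():
        async with Client(token=os.environ.get("DIGITALOCEAN_TOKEN")) as client:
            resp = await client.ssh_keys.list()
            print(resp["meta"]["total"])

    asyncio.run(main())
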
73 | pydo.Client Usage
74 | =================
75 |
76 | .. automodule:: pydo.operations
77 | :members:
78 | :undoc-members:
79 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | pydo
2 | ====
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | pydo
8 |
--------------------------------------------------------------------------------
/docs/source/pydo.aio.operations.rst:
--------------------------------------------------------------------------------
1 | pydo.aio.operations package
2 | ===========================
3 |
4 | Module contents
5 | ---------------
6 |
7 | .. automodule:: pydo.aio.operations
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
--------------------------------------------------------------------------------
/docs/source/pydo.aio.rst:
--------------------------------------------------------------------------------
1 | pydo.aio package
2 | ================
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | pydo.aio.operations
11 |
12 | Module contents
13 | ---------------
14 |
15 | .. automodule:: pydo.aio
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
--------------------------------------------------------------------------------
/docs/source/pydo.operations.rst:
--------------------------------------------------------------------------------
1 | pydo.operations package
2 | =======================
3 |
4 | Module contents
5 | ---------------
6 |
7 | .. automodule:: pydo.operations
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
--------------------------------------------------------------------------------
/docs/source/pydo.rst:
--------------------------------------------------------------------------------
1 | pydo package
2 | ============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 | :maxdepth: 4
9 |
10 | pydo.aio
11 | pydo.operations
12 |
13 | Submodules
14 | ----------
15 |
16 | pydo.custom\_policies module
17 | ----------------------------
18 |
19 | .. automodule:: pydo.custom_policies
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
24 | pydo.exceptions module
25 | ----------------------
26 |
27 | .. automodule:: pydo.exceptions
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
32 | Module contents
33 | ---------------
34 |
35 | .. automodule:: pydo
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
--------------------------------------------------------------------------------
/examples/customize_client_settings/add_http_logger.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 |
4 | from pydo import Client
5 |
6 | LOG_FILE = "simple_ssh_keys.log"
7 |
8 | # Initialize the application logger
9 | logging.basicConfig(filename=LOG_FILE, level=logging.DEBUG)
10 | LOGGER = logging.getLogger(__name__)
11 |
12 | token = os.environ.get("DIGITALOCEAN_TOKEN")
13 | if token == "":
14 | raise Exception("No DigitalOcean API token in DIGITALOCEAN_TOKEN env var")
15 | # Initialize the client with the `logger` kwarg set to the application's logger.
16 | client = Client(
17 | token,
18 | logger=LOGGER,
19 | )
20 |
21 | keys_resp = client.ssh_keys.list()
22 | total = keys_resp["meta"]["total"]
23 |
24 | print(f"TOTAL SSH KEYS ({total})\n")
25 | print("ID\tNAME\tFINGERPRINT")
26 | for d in keys_resp["ssh_keys"]:
27 | print(f"{d['id']}\t{d['name']}\t{d['fingerprint']}")
28 |
29 | print(f"\nView the HTTP log: {LOG_FILE}")
30 |
--------------------------------------------------------------------------------
/examples/customize_client_settings/custom_endpoint.py:
--------------------------------------------------------------------------------
1 | from os import environ
2 |
3 | from pydo import Client
4 |
5 | # Set the DO_ENDPOINT environment variable to a valid endpoint
6 | ENDPOINT = environ.get("DO_ENDPOINT") or "https://my.proxy"
7 |
8 | token = environ.get("DIGITALOCEAN_TOKEN")
9 | if token == "":
10 | raise Exception("No DigitalOcean API token in DIGITALOCEAN_TOKEN env var")
11 |
12 | # Initialize the client with the `endpoint` kwarg set to the custom endpoint.
13 | client = Client(token, endpoint=ENDPOINT)
14 | droplets_resp = client.droplets.list()
15 |
16 | total = droplets_resp["meta"]["total"]
17 |
18 | print(f"TOTAL DROPLETS ({total})\n")
19 | print("ID\tNAME\tSTATUS")
20 | for d in droplets_resp["droplets"]:
21 | print(f"{d['id']}\t{d['name']}\t{d['status']}")
22 |
--------------------------------------------------------------------------------
/examples/customize_client_settings/custom_request_timeout.py:
--------------------------------------------------------------------------------
1 | import random
2 | import string
3 | from os import environ
4 |
5 | from pydo import Client
6 |
7 | KUBERNETES_VERSION = "latest"
8 | REGION = "nyc3"
9 | TIMEOUT_APP = 120
10 | TIMEOUT_KUBERNETES_CREATE = 1200
11 |
12 | token = environ.get("DIGITALOCEAN_TOKEN")
13 | if token == "":
14 | raise Exception("No DigitalOcean API token in DIGITALOCEAN_TOKEN env var")
15 |
16 | # Overwrite the default timeout set by the client with your own
17 | client = Client(token, timeout=TIMEOUT_APP)
18 |
19 | # Normal operation calls will use the app's timeout
20 | clusters_resp = client.kubernetes.list_clusters()
21 | total = clusters_resp["meta"]["total"]
22 |
23 | print(f"TOTAL CLUSTERS ({total})\n")
24 | print("ID\tNAME\tSTATE")
25 | for d in clusters_resp["kubernetes_clusters"]:
26 | print(f"{d['id']}\t{d['name']}\t{d['status']['state']}")
27 |
28 | rnd_suffix = "".join(random.choices(string.ascii_uppercase + string.digits, k=8))
29 | new_cluster_req = {
30 | "name": f"example-cluster-{rnd_suffix}",
31 | "region": REGION,
32 | "version": KUBERNETES_VERSION,
33 | "node_pools": [{"size": "s-1vcpu-2gb", "count": 3, "name": "worker-pool"}],
34 | }
35 |
36 | # Setting the `timeout` kwarg value for a specific operation method call will overwrite
37 | # the timeout for that request.
38 | cluster_create_resp = client.kubernetes.create_cluster(
39 | new_cluster_req, timeout=TIMEOUT_KUBERNETES_CREATE
40 | )
41 | # Note: This method was chosen for the sake of the example. The `create_cluster`
42 | # kubernetes operation isn't a long-running process (unlike the background action
43 | # that tracks the cluster's provisioning state).
44 |
45 | new_cluster_id = cluster_create_resp["kubernetes_cluster"]["id"]
46 | new_cluster_name = cluster_create_resp["kubernetes_cluster"]["name"]
47 | new_cluster_created_at = cluster_create_resp["kubernetes_cluster"]["created_at"]
48 | new_cluster_status = cluster_create_resp["kubernetes_cluster"]["status"]["message"]
49 | print(f"New cluster: Name {new_cluster_name} (ID: {new_cluster_id})")
50 | print(f"New cluster: created at {new_cluster_created_at}")
51 | print(f"New cluster: Status: {new_cluster_status}")
52 |
--------------------------------------------------------------------------------
/examples/customize_client_settings/custom_user_agent.py:
--------------------------------------------------------------------------------
1 | from os import environ
2 |
3 | from pydo import Client
4 |
5 | # Define a custom value for your application's user-agent
6 | USER_AGENT = "droplets-example"
7 |
8 | token = environ.get("DIGITALOCEAN_TOKEN")
9 | if token == "":
10 | raise Exception("No DigitalOcean API token in DIGITALOCEAN_TOKEN env var")
11 |
12 | if environ.get("DO_OVERWRITE_AGENT"):
13 | # When the `user_agent_overwrite` client setting is True, the `user_agent` value
14 | # sent in the operation method will overwrite the full user agent.
15 | client = Client(token, user_agent=USER_AGENT, user_agent_overwrite=True)
16 | droplets_resp = client.droplets.list(f"{USER_AGENT}-overwritten")
17 | else:
18 | # By default, setting the `user_agent` will prefix the full user agent (which includes
19 | # version details about the generated client, the sdk, and the os/platform)
20 | client = Client(token, user_agent=USER_AGENT)
21 | droplets_resp = client.droplets.list()
22 |
23 | total = droplets_resp["meta"]["total"]
24 |
25 | print(f"TOTAL DROPLETS ({total})\n")
26 | print("ID\tNAME\tSTATUS")
27 | for d in droplets_resp["droplets"]:
28 | print(f"{d['id']}\t{d['name']}\t{d['status']}")
29 |
--------------------------------------------------------------------------------
/examples/poc_droplets_volumes_sshkeys.py:
--------------------------------------------------------------------------------
1 | import os, uuid
2 | from time import sleep
3 | from urllib.parse import urlparse
4 | from urllib.parse import parse_qs
5 |
6 | # Would be nice to not need azure branded imports.
7 | from azure.core.exceptions import HttpResponseError
8 |
9 | from pydo import Client
10 |
11 | REGION = "nyc3"
12 |
13 |
14 | class DigitalOceanError(Exception):
15 | pass
16 |
17 |
18 | class DropletCreator:
19 | def __init__(self, *args, **kwargs):
20 | token = os.environ.get("DIGITALOCEAN_TOKEN")
21 | if token == "":
22 | raise Exception("No DigitalOcean API token in DIGITALOCEAN_TOKEN env var")
23 | self.client = Client(token=os.environ.get("DIGITALOCEAN_TOKEN"))
24 |
25 | def throw(self, message):
26 | raise DigitalOceanError(message) from None
27 |
28 | def main(self):
29 | key_name = os.environ.get("SSH_KEY_NAME")
30 | if key_name == "":
31 | raise Exception("SSH_KEY_NAME not set")
32 | ssh_key = self.find_ssh_key(key_name)
33 |
34 | droplet_req = {
35 | "name": "test-{0}".format(str(uuid.uuid4())),
36 | "region": REGION,
37 | "size": "s-1vcpu-1gb",
38 | "image": "ubuntu-22-04-x64",
39 | "ssh_keys": [ssh_key["fingerprint"]],
40 | }
41 | droplet = self.create_droplet(droplet_req)
42 |
43 | volume_req = {
44 | "size_gigabytes": 10,
45 | "name": "test-{0}".format(str(uuid.uuid4())),
46 | "description": "Block storage testing",
47 | "region": REGION,
48 | "filesystem_type": "ext4",
49 | }
50 | volume = self.create_volume(volume_req)
51 |
52 | print(
53 | "Attaching volume {0} to Droplet {1}...".format(volume["id"], droplet["id"])
54 | )
55 | attach_req = {"type": "attach", "droplet_id": droplet["id"]}
56 | try:
57 | action_resp = self.client.volume_actions.post_by_id(
58 | volume["id"], attach_req
59 | )
60 | self.wait_for_action(action_resp["action"]["id"])
61 | except HttpResponseError as err:
62 | self.throw(
63 | "Error: {0} {1}: {2}".format(
64 | err.status_code, err.reason, err.error.message
65 | )
66 | )
67 |
68 | print("Done!")
69 |
70 | def create_droplet(self, req={}):
71 | print("Creating Droplet using: {0}".format(req))
72 | try:
73 | resp = self.client.droplets.create(body=req)
74 | droplet_id = resp["droplet"]["id"]
75 | self.wait_for_action(resp["links"]["actions"][0]["id"])
76 |
77 | get_resp = self.client.droplets.get(droplet_id)
78 | droplet = get_resp["droplet"]
79 | ip_address = ""
80 | # Would be nice if we could surface the IP address somehow.
81 | # For example godo has the PublicIPv4 method:
82 | # https://github.com/digitalocean/godo/blob/a084002940af6a9b818e3c8fb31a4920356fbb75/droplets.go#L66-L79
83 | for net in droplet["networks"]["v4"]:
84 | if net["type"] == "public":
85 | ip_address = net["ip_address"]
86 | except HttpResponseError as err:
87 | self.throw(
88 | "Error: {0} {1}: {2}".format(
89 | err.status_code, err.reason, err.error.message
90 | )
91 | )
92 | else:
93 | print(
94 | "Droplet ID: {0} Name: {1} IP: {2}".format(
95 | droplet_id, droplet["name"], ip_address
96 | )
97 | )
98 | return droplet
99 |
100 | def wait_for_action(self, id, wait=5):
101 | print("Waiting for action {0} to complete...".format(id), end="", flush=True)
102 | status = "in-progress"
103 | while status == "in-progress":
104 | try:
105 | resp = self.client.actions.get(id)
106 | except HttpResponseError as err:
107 | self.throw(
108 | "Error: {0} {1}: {2}".format(
109 | err.status_code, err.reason, err.error.message
110 | )
111 | )
112 | else:
113 | status = resp["action"]["status"]
114 | if status == "in-progress":
115 | print(".", end="", flush=True)
116 | sleep(wait)
117 | elif status == "errored":
118 | raise Exception(
119 | "{0} action {1} {2}".format(
120 | resp["action"]["type"], resp["action"]["id"], status
121 | )
122 | )
123 | else:
124 | print(".")
125 |
126 | def find_ssh_key(self, name):
127 | print("Looking for ssh key named {0}...".format(name))
128 | page = 1
129 | paginated = True
130 | while paginated:
131 | try:
132 | resp = self.client.ssh_keys.list(per_page=50, page=page)
133 | for k in resp["ssh_keys"]:
134 | if k["name"] == name:
135 | print("Found ssh key: {0}".format(k["fingerprint"]))
136 | return k
137 | except HttpResponseError as err:
138 | self.throw(
139 | "Error: {0} {1}: {2}".format(
140 | err.status_code, err.reason, err.error.message
141 | )
142 | )
143 |
144 | pages = resp.links.pages
145 | if "next" in pages.keys():
146 | # Having to parse the URL to find the next page is not very friendly.
147 | parsed_url = urlparse(pages["next"])
148 | page = parse_qs(parsed_url.query)["page"][0]
149 | else:
150 | paginated = False
151 |
152 | raise Exception("no ssh key found")
153 |
154 | def create_volume(self, req={}):
155 | print("Creating volume using: {0}".format(req))
156 | try:
157 | resp = self.client.volumes.create(body=req)
158 | volume = resp["volume"]
159 | except HttpResponseError as err:
160 | self.throw(
161 | "Error: {0} {1}: {2}".format(
162 | err.status_code, err.reason, err.error.message
163 | )
164 | )
165 | else:
166 | print("Created volume {0} ".format(volume["name"], volume["id"]))
167 | return volume
168 |
169 |
170 | if __name__ == "__main__":
171 | dc = DropletCreator()
172 | dc.main()
173 |
--------------------------------------------------------------------------------
/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@digitalocean/pydo",
3 | "lockfileVersion": 2,
4 | "requires": true,
5 | "packages": {
6 | "": {
7 | "name": "@digitalocean/pydo",
8 | "license": "Apache-2.0",
9 | "devDependencies": {
10 | "@autorest/core": "~3.9.0",
11 | "@autorest/modelerfour": "~4.23.6",
12 | "autorest": "~3.6.1"
13 | }
14 | },
15 | "node_modules/@autorest/core": {
16 | "version": "3.9.0",
17 | "resolved": "https://registry.npmjs.org/@autorest/core/-/core-3.9.0.tgz",
18 | "integrity": "sha512-qLpoSQVwrapzwTXNq/QnWiOZstuNowYdYIKmE5tYwiREvitfi/9dNE3GDEQwo6ailQIkw7EV3W5aWFuxGDo5Xw==",
19 | "dev": true,
20 | "bin": {
21 | "autorest-core": "entrypoints/app.js",
22 | "autorest-language-service": "entrypoints/language-service.js"
23 | },
24 | "engines": {
25 | "node": ">=12.0.0"
26 | }
27 | },
28 | "node_modules/@autorest/modelerfour": {
29 | "version": "4.23.7",
30 | "resolved": "https://registry.npmjs.org/@autorest/modelerfour/-/modelerfour-4.23.7.tgz",
31 | "integrity": "sha512-BVjzDd12LvYSkI20WTUgz4RPPPCYtewaWbYmcztltJzRtRe8T9dbaXjM1bySysI8feJSIpgAja2e+lhyKDKX+w==",
32 | "dev": true,
33 | "engines": {
34 | "node": ">=12.0.0"
35 | }
36 | },
37 | "node_modules/autorest": {
38 | "version": "3.6.1",
39 | "resolved": "https://registry.npmjs.org/autorest/-/autorest-3.6.1.tgz",
40 | "integrity": "sha512-tTOnfQq+LAyqnxFrOOnyCEaErXnjRTgduUN7a8LUv2u5deqDlI0zoJllHeIEYDZS2o2Kr1s8pDj2NxaFPOWldg==",
41 | "dev": true,
42 | "hasInstallScript": true,
43 | "bin": {
44 | "autorest": "entrypoints/app.js"
45 | },
46 | "engines": {
47 | "node": ">=12.0.0"
48 | }
49 | }
50 | },
51 | "dependencies": {
52 | "@autorest/core": {
53 | "version": "3.9.0",
54 | "resolved": "https://registry.npmjs.org/@autorest/core/-/core-3.9.0.tgz",
55 | "integrity": "sha512-qLpoSQVwrapzwTXNq/QnWiOZstuNowYdYIKmE5tYwiREvitfi/9dNE3GDEQwo6ailQIkw7EV3W5aWFuxGDo5Xw==",
56 | "dev": true
57 | },
58 | "@autorest/modelerfour": {
59 | "version": "4.23.7",
60 | "resolved": "https://registry.npmjs.org/@autorest/modelerfour/-/modelerfour-4.23.7.tgz",
61 | "integrity": "sha512-BVjzDd12LvYSkI20WTUgz4RPPPCYtewaWbYmcztltJzRtRe8T9dbaXjM1bySysI8feJSIpgAja2e+lhyKDKX+w==",
62 | "dev": true
63 | },
64 | "autorest": {
65 | "version": "3.6.1",
66 | "resolved": "https://registry.npmjs.org/autorest/-/autorest-3.6.1.tgz",
67 | "integrity": "sha512-tTOnfQq+LAyqnxFrOOnyCEaErXnjRTgduUN7a8LUv2u5deqDlI0zoJllHeIEYDZS2o2Kr1s8pDj2NxaFPOWldg==",
68 | "dev": true
69 | }
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@digitalocean/pydo",
3 | "description": "Python client generator based on DigitalOcean's OpenAPI specification.",
4 | "devDependencies": {
5 | "autorest": "~3.6.1",
6 | "@autorest/core": "~3.9.0",
7 | "@autorest/modelerfour": "~4.23.6"
8 | },
9 | "scripts": {
10 | "autorest": "autorest"
11 | },
12 | "repository": {
13 | "type": "git",
14 | "url": "git+https://github.com/digitalocean/pydo.git"
15 | },
16 | "author": "DigitalOcean API Engineering",
17 | "license": "Apache-2.0",
18 | "bugs": {
19 | "url": "https://github.com/digitalocean/pydo/issues"
20 | },
21 | "homepage": "https://github.com/digitalocean/pydo#readme"
22 | }
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "pydo"
3 | version = "0.12.0"
4 | description = "The official client for interacting with the DigitalOcean API"
5 | authors = ["API Engineering "]
6 | license = "Apache-2.0"
7 | readme = "README.md"
8 | repository = "https://github.com/digitalocean/pydo"
9 | documentation = "https://pydo.readthedocs.io/"
10 | keywords = ["digitalocean", "api", "client"]
11 | classifiers = [
12 | "Development Status :: 4 - Beta",
13 | "Topic :: Software Development :: Libraries :: Python Modules"
14 | ]
15 |
16 | [tool.poetry.dependencies]
17 | python = "^3.8.0"
18 | azure-core = ">=1.24.0"
19 | azure-identity = ">=1.5.0"
20 | isodate = ">=0.6.1"
21 | msrest = ">=0.7.1"
22 | typing-extensions = ">=3.7.4"
23 | aiohttp = { version = ">=3.0", optional = true }
24 |
25 | [tool.poetry.dev-dependencies]
26 | black = "^24.3.0"
27 | pylint = "^2.14.4"
28 | pytest = "^7.1.2"
29 | responses = "^0.21.0"
30 | pytest-asyncio = "^0.19.0"
31 | aioresponses = "^0.7.3"
32 | Sphinx = "^5.3.0"
33 | sphinx-rtd-theme = "^1.1.1"
34 |
35 | [build-system]
36 | requires = ["poetry-core>=1.0.0"]
37 | build-backend = "poetry.core.masonry.api"
38 |
39 | [tool.pylint]
40 | max-line-length=88 # matches black's default
41 |
42 | [tool.pylint.messages_control]
43 | disable = "no-name-in-module"
44 |
45 | [tool.pytest.ini_options]
46 | markers = [
47 | "real_billing: Indicates the test requires a real billing account to test"
48 | ]
49 |
50 | [tool.poetry.extras]
51 | aio = ["aiohttp"]
52 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohappyeyeballs==2.3.5 ; python_version >= "3.8" and python_full_version < "4.0.0"
2 | aiohttp==3.10.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
3 | aioresponses==0.7.6 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
4 | aiosignal==1.3.1 ; python_version >= "3.8" and python_full_version < "4.0.0"
5 | alabaster==0.7.13 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
6 | astroid==2.15.8 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
7 | async-timeout==4.0.3 ; python_version >= "3.8" and python_version < "3.11"
8 | attrs==23.2.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
9 | azure-core==1.30.1 ; python_version >= "3.8" and python_full_version < "4.0.0"
10 | azure-identity==1.16.1 ; python_version >= "3.8" and python_full_version < "4.0.0"
11 | babel==2.15.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
12 | black==24.3.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
13 | certifi==2024.2.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
14 | cffi==1.16.0 ; python_version >= "3.8" and python_full_version < "4.0.0" and platform_python_implementation != "PyPy"
15 | charset-normalizer==3.3.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
16 | click==8.1.7 ; python_version >= "3.8" and python_full_version < "4.0.0"
17 | colorama==0.4.6 ; python_version >= "3.8" and python_full_version < "4.0.0" and (sys_platform == "win32" or platform_system == "Windows")
18 | cryptography==42.0.7 ; python_version >= "3.8" and python_full_version < "4.0.0"
19 | dill==0.3.8 ; python_version >= "3.8" and python_full_version < "4.0.0"
20 | docutils==0.18.1 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
21 | exceptiongroup==1.2.1 ; python_full_version >= "3.8.0" and python_version < "3.11"
22 | frozenlist==1.4.1 ; python_version >= "3.8" and python_full_version < "4.0.0"
23 | idna==3.7 ; python_version >= "3.8" and python_full_version < "4.0.0"
24 | imagesize==1.4.1 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
25 | importlib-metadata==7.1.0 ; python_version >= "3.8" and python_version < "3.10"
26 | iniconfig==2.0.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
27 | isodate==0.6.1 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
28 | isort==5.13.2 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
29 | jinja2==3.1.4 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
30 | lazy-object-proxy==1.10.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
31 | markupsafe==2.1.5 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
32 | mccabe==0.7.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
33 | msal-extensions==1.1.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
34 | msal==1.28.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
35 | msrest==0.7.1 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
36 | multidict==6.0.5 ; python_version >= "3.8" and python_full_version < "4.0.0"
37 | mypy-extensions==1.0.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
38 | oauthlib==3.2.2 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
39 | packaging==24.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
40 | pathspec==0.12.1 ; python_version >= "3.8" and python_full_version < "4.0.0"
41 | platformdirs==4.2.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
42 | pluggy==1.5.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
43 | portalocker==2.8.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
44 | pycparser==2.22 ; python_version >= "3.8" and python_full_version < "4.0.0" and platform_python_implementation != "PyPy"
45 | pygments==2.18.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
46 | pyjwt[crypto]==2.8.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
47 | pylint==2.17.7 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
48 | pytest-asyncio==0.19.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
49 | pytest==7.4.4 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
50 | pytz==2024.1 ; python_version >= "3.8" and python_version < "3.9"
51 | pywin32==306 ; python_version >= "3.8" and python_full_version < "4.0.0" and platform_system == "Windows"
52 | requests-oauthlib==2.0.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
53 | requests==2.32.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
54 | responses==0.21.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
55 | six==1.16.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
56 | snowballstemmer==2.2.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
57 | sphinx-rtd-theme==1.3.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
58 | sphinx==5.3.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
59 | sphinxcontrib-applehelp==1.0.4 ; python_version >= "3.8" and python_full_version < "4.0.0"
60 | sphinxcontrib-devhelp==1.0.2 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
61 | sphinxcontrib-htmlhelp==2.0.1 ; python_version >= "3.8" and python_full_version < "4.0.0"
62 | sphinxcontrib-jquery==4.1 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
63 | sphinxcontrib-jsmath==1.0.1 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
64 | sphinxcontrib-qthelp==1.0.3 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
65 | sphinxcontrib-serializinghtml==1.1.5 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
66 | tomli==2.0.1 ; python_version >= "3.8" and python_version < "3.11"
67 | tomlkit==0.12.5 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
68 | typing-extensions==4.12.0 ; python_version >= "3.8" and python_full_version < "4.0.0"
69 | urllib3==2.2.2 ; python_version >= "3.8" and python_full_version < "4.0.0"
70 | wrapt==1.16.0 ; python_full_version >= "3.8.0" and python_full_version < "4.0.0"
71 | yarl==1.9.4 ; python_version >= "3.8" and python_full_version < "4.0.0"
72 | zipp==3.19.0 ; python_version >= "3.8" and python_version < "3.10"
73 |
--------------------------------------------------------------------------------
/scripts/bumpversion.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -euo pipefail
4 |
5 | ORIGIN=${ORIGIN:-origin}
6 |
7 | # BUMP defaults to patch. Friendly aliases are also accepted:
8 | # feature (minor bump) and breaking (major bump).
9 | BUMP=${BUMP:-patch}
10 |
11 | poetry_version=$(poetry version)
12 | version="${poetry_version:5}"
13 | IFS='.' read -r major minor patch <<< "$version"
14 |
15 | case "$BUMP" in
16 | feature | minor)
17 | minor=$((minor + 1))
18 | patch=0
19 | ;;
20 | breaking | major)
21 | major=$((major + 1))
22 | minor=0
23 | patch=0
24 | ;;
25 | *)
26 | patch=$((patch + 1))
27 | ;;
28 | esac
29 |
30 | if [[ $(git status --porcelain) != "" ]]; then
31 | echo "Error: repo is dirty. Run git status, clean repo and try again."
32 | exit 1
33 | elif [[ $(git status --porcelain -b | grep -e "ahead" -e "behind") != "" ]]; then
34 | echo "Error: repo has unpushed commits. Bumping the version should not include other changes."
35 | exit 1
36 | fi
37 |
38 | new_version="$major.$minor.$patch"
39 | poetry version "${new_version#v}"
40 |
41 | echo ""
42 |
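
Usage note: the script takes no arguments; the bump level is controlled by the BUMP environment variable, e.g. `BUMP=minor ./scripts/bumpversion.sh` (defaults to a patch bump when BUMP is unset).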
--------------------------------------------------------------------------------
/scripts/openapi_changelist.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # set -x # Uncomment to debug
4 | set -e
5 |
6 | if [ "$#" -ne 2 ]; then
7 | printf "Exactly two arguments are required.\n\n$(basename "$0") \n"
8 | exit 1
9 | fi
10 |
11 | current_sha=$1
12 | target_sha=$2
13 |
14 | # get_commit_date
15 | # Returns the date the given sha was merged.
16 | function get_commit_date(){
17 |
18 |     if [ -z "$1" ]; then
19 |         echo "get_commit_date() requires 1 argument: <sha>"
20 | exit 1
21 | fi
22 |
23 | gh pr --repo digitalocean/openapi list \
24 | -s merged --json number,title,mergedAt,labels,mergeCommit | \
25 |         jq -r --arg sha "$1" \
26 | '.[] | select(.mergeCommit.oid | startswith($sha)) | .mergedAt'
27 | }
28 |
29 | current_commit_date=$(get_commit_date $1)
30 | test ${#current_commit_date} -eq 20 || (echo "$LINENO: Unexpected value for current_commit_date: $current_commit_date" && exit 1)
31 |
32 | target_commit_date=$(get_commit_date $2)
33 | test ${#target_commit_date} -eq 20 || (echo "$LINENO: Unexpected value for target_commit_date: $target_commit_date" && exit 1)
34 |
35 | echo "## Changelist"
36 | echo
37 | echo "Current commit: digitalocean/openapi@$current_sha ($current_commit_date)"
38 | echo "Target commit: digitalocean/openapi@$target_sha ($target_commit_date)"
39 | echo
40 | gh pr --repo digitalocean/openapi list \
41 | -s merged --json number,title,mergedAt,labels \
42 | --jq '[.[] | {"number": .number, "title": .title, "mergedAt": .mergedAt, "labels": ([.labels[] | .name] | join("|"))}]' | \
43 | jq --arg prev $current_commit_date --arg current $target_commit_date \
44 | '[.[] | select(.mergedAt > $prev) | select(.mergedAt <= $current)]' | \
45 | jq -r '.[] | select(.labels | contains("ignore-for-changelog") | not) | "* digitalocean/openapi#\(.number): \( .title) - \(.mergedAt) [\(.labels)]"'
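
Usage note: the script expects two digitalocean/openapi merge-commit SHAs, e.g. `./scripts/openapi_changelist.sh <current_sha> <target_sha>`, and depends on the `gh` and `jq` CLIs being installed and authenticated.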
--------------------------------------------------------------------------------
/scripts/pr_body.md_tmpl:
--------------------------------------------------------------------------------
1 |
2 | ## Python client generation
3 |
4 | The change to regenerate the python client was triggered by
5 | digitalocean/openapi@$TARGET.
6 |
7 | Note: Owners must review to confirm whether the integration or mocked tests
8 | need to be updated to reflect the changes.
9 |
10 | The following pull requests were merged to digitalocean/openapi@main since the
11 | previous commit (digitalocean/openapi@$CURRENT):
12 |
13 | > Note: each PR should include a list of labels to help categorize the level
14 | (semver) of changes included.
15 |
16 |
--------------------------------------------------------------------------------
/scripts/tag.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -eo pipefail
4 |
5 | ORIGIN=${ORIGIN:-origin}
6 |
7 | if [[ $(git status --porcelain) != "" ]]; then
8 | echo "Error: repo is dirty. Run git status, clean repo and try again."
9 | exit 1
10 | elif [[ $(git status --porcelain -b | grep -e "ahead" -e "behind") != "" ]]; then
11 | echo "Error: repo has unpushed commits. Push commits to remote and try again."
12 | exit 1
13 | fi
14 |
15 | poetry_version=$(poetry version)
16 | tag="v${poetry_version:5}"
17 |
18 | git tag -m "release $tag" -a "$tag" $COMMIT && git push "$ORIGIN" tag "$tag"
19 |
20 | echo ""
--------------------------------------------------------------------------------
/src/pydo/__init__.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | from ._client import GeneratedClient
8 | from ._version import VERSION
9 |
10 | __version__ = VERSION
11 |
12 | try:
13 | from ._patch import __all__ as _patch_all
14 | from ._patch import * # pylint: disable=unused-wildcard-import
15 | except ImportError:
16 | _patch_all = []
17 | from ._patch import patch_sdk as _patch_sdk
18 |
19 | __all__ = [
20 | "GeneratedClient",
21 | ]
22 | __all__.extend([p for p in _patch_all if p not in __all__])
23 |
24 | _patch_sdk()
25 |
--------------------------------------------------------------------------------
/src/pydo/_configuration.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | from typing import Any, TYPE_CHECKING
8 |
9 | from azure.core.pipeline import policies
10 |
11 | from ._version import VERSION
12 |
13 | if TYPE_CHECKING:
14 | # pylint: disable=unused-import,ungrouped-imports
15 | from azure.core.credentials import TokenCredential
16 |
17 |
18 | class GeneratedClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
19 | """Configuration for GeneratedClient.
20 |
21 | Note that all parameters used to create this instance are saved as instance
22 | attributes.
23 |
24 | :param credential: Credential needed for the client to connect to Azure. Required.
25 | :type credential: ~azure.core.credentials.TokenCredential
26 | """
27 |
28 | def __init__(self, credential: "TokenCredential", **kwargs: Any) -> None:
29 | if credential is None:
30 | raise ValueError("Parameter 'credential' must not be None.")
31 |
32 | self.credential = credential
33 | self.credential_scopes = kwargs.pop(
34 | "credential_scopes", ["https://api.digitalocean.com"]
35 | )
36 | kwargs.setdefault("sdk_moniker", "generatedclient/{}".format(VERSION))
37 | self.polling_interval = kwargs.get("polling_interval", 30)
38 | self._configure(**kwargs)
39 |
40 | def _configure(self, **kwargs: Any) -> None:
41 | self.user_agent_policy = kwargs.get(
42 | "user_agent_policy"
43 | ) or policies.UserAgentPolicy(**kwargs)
44 | self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(
45 | **kwargs
46 | )
47 | self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
48 | self.logging_policy = kwargs.get(
49 | "logging_policy"
50 | ) or policies.NetworkTraceLoggingPolicy(**kwargs)
51 | self.http_logging_policy = kwargs.get(
52 | "http_logging_policy"
53 | ) or policies.HttpLoggingPolicy(**kwargs)
54 | self.custom_hook_policy = kwargs.get(
55 | "custom_hook_policy"
56 | ) or policies.CustomHookPolicy(**kwargs)
57 | self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(
58 | **kwargs
59 | )
60 | self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
61 | self.authentication_policy = kwargs.get("authentication_policy")
62 | if self.credential and not self.authentication_policy:
63 | self.authentication_policy = policies.BearerTokenCredentialPolicy(
64 | self.credential, *self.credential_scopes, **kwargs
65 | )
66 |
--------------------------------------------------------------------------------
/src/pydo/_patch.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------
2 | # Copyright (c) Microsoft Corporation.
3 | # Licensed under the MIT License.
4 | # ------------------------------------
5 | """Customize generated code here.
6 |
7 | Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
8 | """
9 | from typing import TYPE_CHECKING
10 |
11 | from azure.core.credentials import AccessToken
12 |
13 | from pydo.custom_policies import CustomHttpLoggingPolicy
14 | from pydo import GeneratedClient, _version
15 |
16 | if TYPE_CHECKING:
17 | # pylint: disable=unused-import,ungrouped-imports
18 | from typing import List
19 |
20 |
21 | class TokenCredentials:
22 | """Credential object used for token authentication"""
23 |
24 | def __init__(self, token: str):
25 | self._token = token
26 | self._expires_on = 0
27 |
28 | def get_token(self, *args, **kwargs) -> AccessToken:
29 | return AccessToken(self._token, expires_on=self._expires_on)
30 |
31 |
32 | class Client(GeneratedClient): # type: ignore
33 | """The official DigitalOcean Python client
34 |
35 | :param token: A valid API token.
36 | :type token: str
37 | :keyword endpoint: Service URL. Default value is "https://api.digitalocean.com".
38 | :paramtype endpoint: str
39 | """
40 |
41 | def __init__(self, token: str, *, timeout: int = 120, **kwargs):
42 | logger = kwargs.get("logger")
43 | if logger is not None and kwargs.get("http_logging_policy") == "":
44 | kwargs["http_logging_policy"] = CustomHttpLoggingPolicy(logger=logger)
45 | sdk_moniker = f"pydo/{_version.VERSION}"
46 |
47 | super().__init__(
48 | TokenCredentials(token), timeout=timeout, sdk_moniker=sdk_moniker, **kwargs
49 | )
50 |
51 |
52 | __all__ = ["Client"]
53 |
54 |
55 | def patch_sdk():
56 | """Do not remove from this file.
57 |
58 | `patch_sdk` is a last resort escape hatch that allows you to do customizations
59 | you can't accomplish using the techniques described in
60 | https://aka.ms/azsdk/python/dpcodegen/python/customize
61 | """
62 |
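
For reference, a minimal usage sketch of the `Client` defined above. This is not part of the repository; the environment variable name and the `droplets.list()` call are illustrative:

    import os

    from pydo import Client

    # Client wraps GeneratedClient, turning a raw API token into
    # TokenCredentials and setting the pydo/<version> sdk_moniker.
    client = Client(token=os.environ["DIGITALOCEAN_TOKEN"])

    # Operation groups (droplets, ssh_keys, actions, ...) hang off the client.
    resp = client.droplets.list()
    for droplet in resp["droplets"]:
        print(droplet["name"])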
--------------------------------------------------------------------------------
/src/pydo/_version.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | VERSION = "0.12.0"
8 |
--------------------------------------------------------------------------------
/src/pydo/aio/__init__.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | from ._client import GeneratedClient
8 |
9 | try:
10 | from ._patch import __all__ as _patch_all
11 | from ._patch import * # pylint: disable=unused-wildcard-import
12 | except ImportError:
13 | _patch_all = []
14 | from ._patch import patch_sdk as _patch_sdk
15 |
16 | __all__ = [
17 | "GeneratedClient",
18 | ]
19 | __all__.extend([p for p in _patch_all if p not in __all__])
20 |
21 | _patch_sdk()
22 |
--------------------------------------------------------------------------------
/src/pydo/aio/_configuration.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | from typing import Any, TYPE_CHECKING
8 |
9 | from azure.core.pipeline import policies
10 |
11 | from .._version import VERSION
12 |
13 | if TYPE_CHECKING:
14 | # pylint: disable=unused-import,ungrouped-imports
15 | from azure.core.credentials_async import AsyncTokenCredential
16 |
17 |
18 | class GeneratedClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long
19 | """Configuration for GeneratedClient.
20 |
21 | Note that all parameters used to create this instance are saved as instance
22 | attributes.
23 |
24 | :param credential: Credential needed for the client to connect to Azure. Required.
25 | :type credential: ~azure.core.credentials_async.AsyncTokenCredential
26 | """
27 |
28 | def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None:
29 | if credential is None:
30 | raise ValueError("Parameter 'credential' must not be None.")
31 |
32 | self.credential = credential
33 | self.credential_scopes = kwargs.pop(
34 | "credential_scopes", ["https://api.digitalocean.com"]
35 | )
36 | kwargs.setdefault("sdk_moniker", "generatedclient/{}".format(VERSION))
37 | self.polling_interval = kwargs.get("polling_interval", 30)
38 | self._configure(**kwargs)
39 |
40 | def _configure(self, **kwargs: Any) -> None:
41 | self.user_agent_policy = kwargs.get(
42 | "user_agent_policy"
43 | ) or policies.UserAgentPolicy(**kwargs)
44 | self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(
45 | **kwargs
46 | )
47 | self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
48 | self.logging_policy = kwargs.get(
49 | "logging_policy"
50 | ) or policies.NetworkTraceLoggingPolicy(**kwargs)
51 | self.http_logging_policy = kwargs.get(
52 | "http_logging_policy"
53 | ) or policies.HttpLoggingPolicy(**kwargs)
54 | self.custom_hook_policy = kwargs.get(
55 | "custom_hook_policy"
56 | ) or policies.CustomHookPolicy(**kwargs)
57 | self.redirect_policy = kwargs.get(
58 | "redirect_policy"
59 | ) or policies.AsyncRedirectPolicy(**kwargs)
60 | self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(
61 | **kwargs
62 | )
63 | self.authentication_policy = kwargs.get("authentication_policy")
64 | if self.credential and not self.authentication_policy:
65 | self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(
66 | self.credential, *self.credential_scopes, **kwargs
67 | )
68 |
--------------------------------------------------------------------------------
/src/pydo/aio/_patch.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------
2 | # Copyright (c) Microsoft Corporation.
3 | # Licensed under the MIT License.
4 | # ------------------------------------
5 | """Customize generated code here.
6 |
7 | Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
8 | """
9 | from typing import TYPE_CHECKING
10 |
11 | from azure.core.credentials import AccessToken
12 | from azure.core.credentials_async import AsyncTokenCredential
13 |
14 | from pydo import _version
15 | from pydo.custom_policies import CustomHttpLoggingPolicy
16 | from pydo.aio import GeneratedClient
17 |
18 | if TYPE_CHECKING:
19 | # pylint: disable=unused-import,ungrouped-imports
20 | from typing import List
21 |
22 |
23 | class TokenCredentials(AsyncTokenCredential):
24 | """DO Customized Code:
25 | Added to simplify authentication.
26 | """
27 |
28 | def __init__(self, token: str):
29 | self._token = token
30 | self._expires_on = 0
31 |
32 | async def get_token(self, *args, **kwargs) -> AccessToken:
33 | return AccessToken(self._token, expires_on=self._expires_on)
34 |
35 |
36 | class Client(GeneratedClient): # type: ignore
37 | """The official DigitalOcean Python client
38 |
39 | :param token: A valid API token.
40 | :type token: str
41 | :keyword endpoint: Service URL. Default value is "https://api.digitalocean.com".
42 | :paramtype endpoint: str
43 | """
44 |
45 | def __init__(self, token: str, *, timeout: int = 120, **kwargs):
46 | logger = kwargs.get("logger")
47 | if logger is not None and kwargs.get("http_logging_policy") == "":
48 | kwargs["http_logging_policy"] = CustomHttpLoggingPolicy(logger=logger)
49 | sdk_moniker = f"pydo/{_version.VERSION}"
50 |
51 | super().__init__(
52 | TokenCredentials(token), timeout=timeout, sdk_moniker=sdk_moniker, **kwargs
53 | )
54 |
55 |
56 | # Add all objects you want publicly available to users at this package level
57 | __all__ = ["Client"] # type: List[str]
58 |
59 |
60 | def patch_sdk():
61 | """Do not remove from this file.
62 |
63 | `patch_sdk` is a last resort escape hatch that allows you to do customizations
64 | you can't accomplish using the techniques described in
65 | https://aka.ms/azsdk/python/dpcodegen/python/customize
66 | """
67 |
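
And a matching sketch for the asynchronous client above (illustrative, not part of the repository; the aio transport requires aiohttp, which is pinned in requirements.txt):

    import asyncio
    import os

    from pydo.aio import Client

    async def main() -> None:
        # Same surface as the sync client, but operations return awaitables.
        client = Client(token=os.environ["DIGITALOCEAN_TOKEN"])
        resp = await client.account.get()
        print(resp["account"]["email"])

    asyncio.run(main())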
--------------------------------------------------------------------------------
/src/pydo/aio/operations/__init__.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | from ._operations import OneClicksOperations
8 | from ._operations import AccountOperations
9 | from ._operations import SshKeysOperations
10 | from ._operations import ActionsOperations
11 | from ._operations import AppsOperations
12 | from ._operations import CdnOperations
13 | from ._operations import CertificatesOperations
14 | from ._operations import BalanceOperations
15 | from ._operations import BillingHistoryOperations
16 | from ._operations import InvoicesOperations
17 | from ._operations import DatabasesOperations
18 | from ._operations import DomainsOperations
19 | from ._operations import DropletsOperations
20 | from ._operations import DropletActionsOperations
21 | from ._operations import AutoscalepoolsOperations
22 | from ._operations import FirewallsOperations
23 | from ._operations import FunctionsOperations
24 | from ._operations import ImagesOperations
25 | from ._operations import ImageActionsOperations
26 | from ._operations import KubernetesOperations
27 | from ._operations import LoadBalancersOperations
28 | from ._operations import MonitoringOperations
29 | from ._operations import PartnerAttachmentsOperations
30 | from ._operations import ProjectsOperations
31 | from ._operations import RegionsOperations
32 | from ._operations import RegistriesOperations
33 | from ._operations import RegistryOperations
34 | from ._operations import ReservedIPsOperations
35 | from ._operations import ReservedIPsActionsOperations
36 | from ._operations import ReservedIPv6Operations
37 | from ._operations import ReservedIPv6ActionsOperations
38 | from ._operations import SizesOperations
39 | from ._operations import SnapshotsOperations
40 | from ._operations import SpacesKeyOperations
41 | from ._operations import TagsOperations
42 | from ._operations import VolumesOperations
43 | from ._operations import VolumeActionsOperations
44 | from ._operations import VolumeSnapshotsOperations
45 | from ._operations import VpcsOperations
46 | from ._operations import VpcPeeringsOperations
47 | from ._operations import UptimeOperations
48 | from ._operations import GenaiOperations
49 |
50 | from ._patch import __all__ as _patch_all
51 | from ._patch import * # pylint: disable=unused-wildcard-import
52 | from ._patch import patch_sdk as _patch_sdk
53 |
54 | __all__ = [
55 | "OneClicksOperations",
56 | "AccountOperations",
57 | "SshKeysOperations",
58 | "ActionsOperations",
59 | "AppsOperations",
60 | "CdnOperations",
61 | "CertificatesOperations",
62 | "BalanceOperations",
63 | "BillingHistoryOperations",
64 | "InvoicesOperations",
65 | "DatabasesOperations",
66 | "DomainsOperations",
67 | "DropletsOperations",
68 | "DropletActionsOperations",
69 | "AutoscalepoolsOperations",
70 | "FirewallsOperations",
71 | "FunctionsOperations",
72 | "ImagesOperations",
73 | "ImageActionsOperations",
74 | "KubernetesOperations",
75 | "LoadBalancersOperations",
76 | "MonitoringOperations",
77 | "PartnerAttachmentsOperations",
78 | "ProjectsOperations",
79 | "RegionsOperations",
80 | "RegistriesOperations",
81 | "RegistryOperations",
82 | "ReservedIPsOperations",
83 | "ReservedIPsActionsOperations",
84 | "ReservedIPv6Operations",
85 | "ReservedIPv6ActionsOperations",
86 | "SizesOperations",
87 | "SnapshotsOperations",
88 | "SpacesKeyOperations",
89 | "TagsOperations",
90 | "VolumesOperations",
91 | "VolumeActionsOperations",
92 | "VolumeSnapshotsOperations",
93 | "VpcsOperations",
94 | "VpcPeeringsOperations",
95 | "UptimeOperations",
96 | "GenaiOperations",
97 | ]
98 | __all__.extend([p for p in _patch_all if p not in __all__])
99 | _patch_sdk()
100 |
--------------------------------------------------------------------------------
/src/pydo/aio/operations/_patch.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------
2 | # Copyright (c) Microsoft Corporation.
3 | # Licensed under the MIT License.
4 | # ------------------------------------
5 | """Customize generated code here.
6 |
7 | Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
8 | """
9 | from typing import TYPE_CHECKING
10 |
11 | if TYPE_CHECKING:
12 | # pylint: disable=unused-import,ungrouped-imports
13 | from typing import List
14 |
15 | __all__ = (
16 | []
17 | ) # type: List[str] # Add all objects you want publicly available to users at this package level
18 |
19 |
20 | def patch_sdk():
21 | """Do not remove from this file.
22 |
23 | `patch_sdk` is a last resort escape hatch that allows you to do customizations
24 | you can't accomplish using the techniques described in
25 | https://aka.ms/azsdk/python/dpcodegen/python/customize
26 | """
27 |
--------------------------------------------------------------------------------
/src/pydo/custom_policies.py:
--------------------------------------------------------------------------------
1 | from azure.core.pipeline.policies import HttpLoggingPolicy
2 |
3 |
4 | class CustomHttpLoggingPolicy(HttpLoggingPolicy):
5 |
6 | # ALLOWED_HEADERS lists headers that will not be redacted when logging
7 | ALLOWED_HEADERS = set(
8 | [
9 | "x-request-id",
10 | "ratelimit-limit",
11 | "ratelimit-remaining",
12 | "ratelimit-reset",
13 | "x-gateway",
14 | "x-request-id",
15 | "x-response-from",
16 | "CF-Cache-Status",
17 | "Expect-CT",
18 | "Server",
19 | "CF-RAY",
20 | "Content-Encoding",
21 | ]
22 | )
23 |
24 | def __init__(self, logger=None, **kwargs):
25 | super().__init__(logger, **kwargs)
26 | self.allowed_header_names.update(self.ALLOWED_HEADERS)
27 |
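
As wired up in `_patch.py` above, this policy only takes effect when a logger is passed alongside an empty `http_logging_policy`; a minimal sketch with illustrative values:

    import logging
    import os

    from pydo import Client

    # A DEBUG-level logger; CustomHttpLoggingPolicy will log the allowed
    # headers (rate limits, request IDs, ...) without redaction.
    logger = logging.getLogger("pydo")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler())

    client = Client(
        token=os.environ["DIGITALOCEAN_TOKEN"],
        logger=logger,
        http_logging_policy="",
    )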
--------------------------------------------------------------------------------
/src/pydo/exceptions.py:
--------------------------------------------------------------------------------
1 | """Exceptions"""
2 |
3 | # pylint: disable=unused-import
4 | # Importing exceptions this way makes them accessible through this module.
5 | # Therefore, obscuring azure packages from the end user
6 | from azure.core.exceptions import HttpResponseError
7 |
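
A short sketch of the intended pattern, catching the re-exported exception instead of importing from azure.core directly (the droplet ID is a placeholder):

    import os

    from pydo import Client
    from pydo.exceptions import HttpResponseError

    client = Client(token=os.environ["DIGITALOCEAN_TOKEN"])

    try:
        client.droplets.get(droplet_id=1)  # placeholder ID
    except HttpResponseError as err:
        # Surface the HTTP status and reason from the failed response.
        print(err.status_code, err.reason)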
--------------------------------------------------------------------------------
/src/pydo/operations/__init__.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # --------------------------------------------------------------------------
3 | # Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.9.0, generator: @autorest/python@6.13.16)
4 | # Changes may cause incorrect behavior and will be lost if the code is regenerated.
5 | # --------------------------------------------------------------------------
6 |
7 | from ._operations import OneClicksOperations
8 | from ._operations import AccountOperations
9 | from ._operations import SshKeysOperations
10 | from ._operations import ActionsOperations
11 | from ._operations import AppsOperations
12 | from ._operations import CdnOperations
13 | from ._operations import CertificatesOperations
14 | from ._operations import BalanceOperations
15 | from ._operations import BillingHistoryOperations
16 | from ._operations import InvoicesOperations
17 | from ._operations import DatabasesOperations
18 | from ._operations import DomainsOperations
19 | from ._operations import DropletsOperations
20 | from ._operations import DropletActionsOperations
21 | from ._operations import AutoscalepoolsOperations
22 | from ._operations import FirewallsOperations
23 | from ._operations import FunctionsOperations
24 | from ._operations import ImagesOperations
25 | from ._operations import ImageActionsOperations
26 | from ._operations import KubernetesOperations
27 | from ._operations import LoadBalancersOperations
28 | from ._operations import MonitoringOperations
29 | from ._operations import PartnerAttachmentsOperations
30 | from ._operations import ProjectsOperations
31 | from ._operations import RegionsOperations
32 | from ._operations import RegistriesOperations
33 | from ._operations import RegistryOperations
34 | from ._operations import ReservedIPsOperations
35 | from ._operations import ReservedIPsActionsOperations
36 | from ._operations import ReservedIPv6Operations
37 | from ._operations import ReservedIPv6ActionsOperations
38 | from ._operations import SizesOperations
39 | from ._operations import SnapshotsOperations
40 | from ._operations import SpacesKeyOperations
41 | from ._operations import TagsOperations
42 | from ._operations import VolumesOperations
43 | from ._operations import VolumeActionsOperations
44 | from ._operations import VolumeSnapshotsOperations
45 | from ._operations import VpcsOperations
46 | from ._operations import VpcPeeringsOperations
47 | from ._operations import UptimeOperations
48 | from ._operations import GenaiOperations
49 |
50 | from ._patch import __all__ as _patch_all
51 | from ._patch import * # pylint: disable=unused-wildcard-import
52 | from ._patch import patch_sdk as _patch_sdk
53 |
54 | __all__ = [
55 | "OneClicksOperations",
56 | "AccountOperations",
57 | "SshKeysOperations",
58 | "ActionsOperations",
59 | "AppsOperations",
60 | "CdnOperations",
61 | "CertificatesOperations",
62 | "BalanceOperations",
63 | "BillingHistoryOperations",
64 | "InvoicesOperations",
65 | "DatabasesOperations",
66 | "DomainsOperations",
67 | "DropletsOperations",
68 | "DropletActionsOperations",
69 | "AutoscalepoolsOperations",
70 | "FirewallsOperations",
71 | "FunctionsOperations",
72 | "ImagesOperations",
73 | "ImageActionsOperations",
74 | "KubernetesOperations",
75 | "LoadBalancersOperations",
76 | "MonitoringOperations",
77 | "PartnerAttachmentsOperations",
78 | "ProjectsOperations",
79 | "RegionsOperations",
80 | "RegistriesOperations",
81 | "RegistryOperations",
82 | "ReservedIPsOperations",
83 | "ReservedIPsActionsOperations",
84 | "ReservedIPv6Operations",
85 | "ReservedIPv6ActionsOperations",
86 | "SizesOperations",
87 | "SnapshotsOperations",
88 | "SpacesKeyOperations",
89 | "TagsOperations",
90 | "VolumesOperations",
91 | "VolumeActionsOperations",
92 | "VolumeSnapshotsOperations",
93 | "VpcsOperations",
94 | "VpcPeeringsOperations",
95 | "UptimeOperations",
96 | "GenaiOperations",
97 | ]
98 | __all__.extend([p for p in _patch_all if p not in __all__])
99 | _patch_sdk()
100 |
--------------------------------------------------------------------------------
/src/pydo/operations/_patch.py:
--------------------------------------------------------------------------------
1 | # ------------------------------------
2 | # Copyright (c) Microsoft Corporation.
3 | # Licensed under the MIT License.
4 | # ------------------------------------
5 | """Customize generated code here.
6 |
7 | Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
8 | """
9 | from typing import TYPE_CHECKING
10 |
11 | from ._operations import DropletsOperations as Droplets
12 |
13 | if TYPE_CHECKING:
14 | # pylint: disable=unused-import,ungrouped-imports
15 | pass
16 |
17 |
18 | __all__ = []
19 |
20 |
21 | def patch_sdk():
22 | """Do not remove from this file.
23 |
24 | `patch_sdk` is a last resort escape hatch that allows you to do customizations
25 | you can't accomplish using the techniques described in
26 | https://aka.ms/azsdk/python/dpcodegen/python/customize
27 | """
28 |
--------------------------------------------------------------------------------
/src/pydo/py.typed:
--------------------------------------------------------------------------------
1 | # Marker file for PEP 561.
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/digitalocean/pydo/54e85481359284d7ffdb0d771185f6d1d998dbc2/tests/__init__.py
--------------------------------------------------------------------------------
/tests/integration/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/digitalocean/pydo/54e85481359284d7ffdb0d771185f6d1d998dbc2/tests/integration/__init__.py
--------------------------------------------------------------------------------
/tests/integration/conftest.py:
--------------------------------------------------------------------------------
1 | """Pytest configuration for integration tests."""
2 |
3 | from os import environ
4 |
5 | import pytest
6 |
7 | from cryptography.hazmat.backends import default_backend as crypto_default_backend
8 | from cryptography.hazmat.primitives import serialization as crypto_serialization
9 | from cryptography.hazmat.primitives.asymmetric import rsa
10 |
11 | from pydo import Client
12 | from pydo.aio import Client as aioClient
13 |
14 |
15 | @pytest.fixture(scope="session")
16 | def integration_client() -> Client:
17 | """Instantiates a pydo Client for use with integration tests.
18 |
19 | The client requires the environment variable DIGITALOCEAN_TOKEN with a valid API
20 | token.
21 |
22 | *IMPORTANT*: Use of this client will create real resources on the
23 | account.
24 | """
25 |
26 | token = environ.get("DIGITALOCEAN_TOKEN", None)
27 |
28 | if token is None:
29 | pytest.fail("Expected environment variable DIGITALOCEAN_TOKEN")
30 |
31 | client = Client(token)
32 | return client
33 |
34 |
35 | @pytest.fixture(scope="session")
36 | def async_integration_client() -> aioClient:
37 | """Instantiates a pydo Client for use with integration tests.
38 |
39 | The client requires the environment variable DIGITALOCEAN_TOKEN with a valid API
40 | token.
41 |
42 | *IMPORTANT*: Use of this client will create real resources on the
43 | account.
44 | """
45 |
46 | token = environ.get("DIGITALOCEAN_TOKEN", None)
47 |
48 | if token is None:
49 | pytest.fail("Expected environment variable DIGITALOCEAN_TOKEN")
50 |
51 | client = aioClient(token)
52 | return client
53 |
54 |
55 | @pytest.fixture(scope="session")
56 | def public_key() -> bytes:
57 | """Create SSH public key material."""
58 | key = rsa.generate_private_key(
59 | backend=crypto_default_backend(), public_exponent=65537, key_size=2048
60 | )
61 |
62 | public_key_material = key.public_key().public_bytes(
63 | crypto_serialization.Encoding.OpenSSH, crypto_serialization.PublicFormat.OpenSSH
64 | )
65 |
66 | return public_key_material
67 |
68 |
69 | @pytest.fixture(scope="module")
70 | def invoice_uuid_param():
71 | """Gets invoice UUID"""
72 | invoice_uuid = environ.get("INVOICE_UUID_PARAM", None)
73 |
74 | if invoice_uuid is None:
75 |         pytest.fail("Expected environment variable INVOICE_UUID_PARAM")
76 | 
77 |     return invoice_uuid
77 |
78 | @pytest.fixture(scope="session")
79 | def spaces_endpoint() -> str:
80 | """Get the spaces endpoint"""
81 | spaces = environ.get("SPACES_ENDPOINT", None)
82 |
83 | if spaces is None:
84 | pytest.fail("Expected environment variable SPACES_ENDPOINT")
85 |
86 | return spaces
87 |
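
Usage note: the integration suite needs DIGITALOCEAN_TOKEN (plus INVOICE_UUID_PARAM and SPACES_ENDPOINT for the billing and CDN fixtures) in the environment, e.g. `DIGITALOCEAN_TOKEN=<token> pytest tests/integration`; the `real_billing` and `long_running` markers used below can be deselected with `pytest -m "not real_billing and not long_running"`.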
--------------------------------------------------------------------------------
/tests/integration/defaults.py:
--------------------------------------------------------------------------------
1 | """Integration tests default values
2 |
3 | Overwrite default values with environment variables.
4 | """
5 |
6 | from os import environ
7 |
8 | PREFIX = "cgtest"
9 |
10 | # CUSTOMIZABLE
11 | DROPLET_SIZE = environ.get("DO_DROPLET_SIZE") or "s-1vcpu-1gb"
12 | DROPLET_IMAGE = environ.get("DO_DROPLET_IMAGE") or "ubuntu-22-04-x64"
13 |
14 | K8S_VERSION = environ.get("DO_K8S_VERSION") or "latest"
15 | K8S_NODE_SIZE = environ.get("DO_K8S_NODE_SIZE") or "s-1vcpu-2gb"
16 |
17 | REGION = environ.get("DO_REGION") or "nyc3"
18 |
19 | INVOICE_UUID_PARAM = environ.get("DO_INVOICE_UUID") or "something"
20 |
--------------------------------------------------------------------------------
/tests/integration/test_actions.py:
--------------------------------------------------------------------------------
1 | """ test_actions.py
2 |     Integration Test for Actions
3 | """
4 |
5 | from pydo import Client
6 |
7 |
8 | def test_actions(integration_client: Client):
9 | """Testing the List and Gets
10 | of the actions endpoint
11 | """
12 |
13 | list_resp = integration_client.actions.list()
14 |
15 | assert list_resp is not None
16 |
17 | action_id = list_resp["actions"][0]["id"] or 0
18 |
19 | assert action_id != 0
20 |
21 | get_resp = integration_client.actions.get(action_id)
22 |
23 | assert action_id == get_resp["action"]["id"]
24 |
--------------------------------------------------------------------------------
/tests/integration/test_app.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=line-too-long
2 |
3 | """ test_app.py
4 | Integration tests for apps.
5 | """
6 |
7 | import uuid
8 |
9 | from tests.integration import defaults
10 | from tests.integration import shared
11 | from pydo import Client
12 |
13 |
14 | def test_app_lifecycle(integration_client: Client):
15 | """Tests the entire lifecycle of an app
16 | Creates
17 | Lists
18 | Updates
19 | Deletes
20 | """
21 | name = f"{defaults.PREFIX}-{uuid.uuid4().hex[:10]}"
22 | create_payload = {
23 | "spec": {
24 | "name": name,
25 | "region": "nyc",
26 | "alerts": [{"rule": "DEPLOYMENT_LIVE"}],
27 | "services": [
28 | {
29 | "name": "api",
30 | "git": {
31 | "branch": "main",
32 | "repo_clone_url": "https://github.com/digitalocean/sample-golang.git",
33 | },
34 | "environment_slug": "go",
35 | "instance_count": 2,
36 | "instance_size_slug": "professional-xs",
37 | "routes": [{"path": "/"}],
38 | }
39 | ],
40 | }
41 | }
42 |
43 | propose_resp = integration_client.apps.validate_app_spec(create_payload)
44 |
45 | assert propose_resp["app_name_available"] is True
46 |
47 | with shared.with_test_app(integration_client, create_payload) as app:
48 | list_resp = integration_client.apps.list()
49 |
50 | app_id = app["app"]["id"]
51 |
52 | assert app_id in [app["id"] for app in list_resp["apps"]]
53 |
54 | # An app may not have any alerts once running
55 | alerts_resp = integration_client.apps.list_alerts(app_id)
56 |
57 | assert alerts_resp is not None
58 | assert alerts_resp["alerts"][0]["spec"]["rule"] == "DEPLOYMENT_LIVE"
59 |
60 | alert_id = alerts_resp["alerts"][0]["id"]
61 |
62 | # assign_alert_destinations requires an email address that has access to the app
63 | account_resp = integration_client.account.get()
64 | assert account_resp is not None
65 | alert_req = {"emails": [account_resp["account"]["email"]]}
66 |
67 | alert_resp = integration_client.apps.assign_alert_destinations(
68 | app_id, alert_id, alert_req
69 | )
70 |
71 | assert alert_resp is not None
72 |
73 | config = app["app"]["spec"]
74 | config["region"] = "ams"
75 | update_payload = {}
76 | update_payload["spec"] = config
77 |
78 | update_resp = integration_client.apps.update(app_id, update_payload)
79 |
80 | assert update_resp["app"]["spec"]["region"] == "ams"
81 |
82 |
83 | def test_app_info(integration_client: Client):
84 | """Tests all information endpoints"""
85 |
86 | list_instance_sizes = integration_client.apps.list_instance_sizes()
87 |
88 | assert len(list_instance_sizes["instance_sizes"]) >= 4
89 |
90 | get_instance_size = integration_client.apps.get_instance_size("basic-xxs")
91 |
92 | assert get_instance_size["instance_size"]["slug"] == "basic-xxs"
93 |
94 | list_regions = integration_client.apps.list_regions()
95 |
96 | assert len(list_regions["regions"]) >= 5
97 |
98 |
99 | def test_app_metrics_list_bandwidth_daily(integration_client: Client):
100 | """Tests listing the bandwidth_day metrics for multiple apps"""
101 |
102 | name1 = f"{defaults.PREFIX}-{uuid.uuid4().hex[:10]}"
103 | name2 = f"{defaults.PREFIX}-{uuid.uuid4().hex[:10]}"
104 |
105 | create_app_req1 = {
106 | "spec": {
107 | "name": name1,
108 | "region": "nyc",
109 | "alerts": [{"rule": "DEPLOYMENT_LIVE"}],
110 | "services": [
111 | {
112 | "name": "api",
113 | "git": {
114 | "branch": "main",
115 | "repo_clone_url": "https://github.com/digitalocean/sample-golang.git",
116 | },
117 | "environment_slug": "go",
118 | "instance_count": 2,
119 | "instance_size_slug": "professional-xs",
120 | "routes": [{"path": "/"}],
121 | }
122 | ],
123 | }
124 | }
125 |
126 | create_app_req2 = {
127 | "spec": {
128 | "name": name2,
129 | "region": "nyc",
130 | "alerts": [{"rule": "DEPLOYMENT_LIVE"}],
131 | "services": [
132 | {
133 | "name": "api",
134 | "git": {
135 | "branch": "main",
136 | "repo_clone_url": "https://github.com/digitalocean/sample-golang.git",
137 | },
138 | "environment_slug": "go",
139 | "instance_count": 2,
140 | "instance_size_slug": "professional-xs",
141 | "routes": [{"path": "/"}],
142 | }
143 | ],
144 | }
145 | }
146 |
147 | with shared.with_test_app(integration_client, create_app_req1) as test_app_resp1:
148 | with shared.with_test_app(
149 | integration_client, create_app_req2
150 | ) as test_app_resp2:
151 | app_id1 = test_app_resp1["app"]["id"]
152 | app_id2 = test_app_resp2["app"]["id"]
153 |
154 | metrics_resp = integration_client.apps.list_metrics_bandwidth_daily(
155 | {"app_ids": [app_id1, app_id2]}
156 | )
157 |
158 | assert "app_bandwidth_usage" in metrics_resp.keys()
159 | resp_app_ids = [
160 | metrics["app_id"] for metrics in metrics_resp["app_bandwidth_usage"]
161 | ]
162 | assert app_id1 in resp_app_ids
163 | assert app_id2 in resp_app_ids
164 |
165 |
166 | def test_app_metrics_get_bandwidth_daily(integration_client: Client):
167 | """Tests getting the bandwidth_daily metrics for one app"""
168 |
169 | create_app_req = {
170 | "spec": {
171 | "name": f"{defaults.PREFIX}-{uuid.uuid4().hex[:10]}",
172 | "region": "nyc",
173 | "alerts": [{"rule": "DEPLOYMENT_LIVE"}],
174 | "services": [
175 | {
176 | "name": "api",
177 | "git": {
178 | "branch": "main",
179 | "repo_clone_url": "https://github.com/digitalocean/sample-golang.git",
180 | },
181 | "environment_slug": "go",
182 | "instance_count": 2,
183 | "instance_size_slug": "professional-xs",
184 | "routes": [{"path": "/"}],
185 | }
186 | ],
187 | }
188 | }
189 | with shared.with_test_app(integration_client, create_app_req) as test_app_resp:
190 | app_id = test_app_resp["app"]["id"]
191 | metrics_resp = integration_client.apps.get_metrics_bandwidth_daily(app_id)
192 |
193 | assert "app_bandwidth_usage" in metrics_resp.keys()
194 | assert metrics_resp["app_bandwidth_usage"][0]["app_id"] == app_id
195 |
--------------------------------------------------------------------------------
/tests/integration/test_app_deployment.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 | # pylint: disable=line-too-long
3 | # pylint: disable=too-many-locals
4 | """ test_app_deployment.py
5 | Integration tests for app deployments
6 | """
7 |
8 | import time
9 | import uuid
10 | from tests.integration import defaults
11 | from tests.integration import shared
12 | from pydo import Client
13 |
14 |
15 | def test_app_deployment_lifecycle(integration_client: Client):
16 | """Tests the app deployment endpoints"""
17 |
18 | name = f"{defaults.PREFIX}-{uuid.uuid4().hex[:10]}"
19 | create_payload = {
20 | "spec": {
21 | "name": name,
22 | "region": "nyc",
23 | "services": [
24 | {
25 | "name": "api",
26 | "git": {
27 | "branch": "main",
28 | "repo_clone_url": "https://github.com/digitalocean/sample-golang.git",
29 | },
30 | "run_command": "bin/api",
31 | "environment_slug": "go",
32 | "instance_count": 2,
33 | "instance_size_slug": "professional-xs",
34 | "routes": [{"path": "/"}],
35 | }
36 | ],
37 | }
38 | }
39 |
40 | with shared.with_test_app(integration_client, create_payload) as app:
41 | app_id = app["app"]["id"]
42 | app_deployment = integration_client.apps.create_deployment(
43 | app_id, {"force_build": True}
44 | )
45 |
46 | deployment_id = app_deployment["deployment"]["id"]
47 |
48 | list_deployments = integration_client.apps.list_deployments(app_id)
49 |
50 | assert deployment_id in [
51 | deployment["id"] for deployment in list_deployments["deployments"]
52 | ]
53 |
54 | single_deployment = integration_client.apps.get_deployment(
55 | app_id, deployment_id
56 | )
57 |
58 | assert deployment_id == single_deployment["deployment"]["id"]
59 |
60 | # Deployment logs are not available until the deployment is complete.
61 | # Sleep until the build is finished. 😴
62 | time.sleep(120)
63 |
64 | agg_logs = integration_client.apps.get_logs_aggregate(
65 | app_id, deployment_id, type="BUILD"
66 | )
67 |
68 | assert agg_logs is not None
69 |
70 | logs = integration_client.apps.get_logs(
71 | app_id, deployment_id, "component", type="BUILD"
72 | )
73 |
74 | assert logs is not None
75 |
76 | cancel_deployment = integration_client.apps.cancel_deployment(
77 | app_id, deployment_id
78 | )
79 |
80 | assert deployment_id == cancel_deployment["deployment"]["id"]
81 |
82 | validate_rollback_req = {"deployment_id": deployment_id, "skip_pin": False}
83 |
84 | validate_resp = integration_client.apps.validate_rollback(
85 | app_id, validate_rollback_req
86 | )
87 |
88 | assert validate_resp["valid"] is True
89 |
90 | rollback_req = validate_rollback_req
91 |
92 | rollback_resp = integration_client.apps.create_rollback(app_id, rollback_req)
93 |
94 | assert rollback_resp["deployment"]["id"] is not None
95 |
96 | integration_client.apps.commit_rollback(app_id)
97 |
98 | revert_resp = integration_client.apps.revert_rollback(app_id)
99 |
100 | assert revert_resp["deployment"]["id"] is not None
101 |
--------------------------------------------------------------------------------
/tests/integration/test_billing.py:
--------------------------------------------------------------------------------
1 | """ test_billing.py
2 | Integration Test for Billing
3 | """
4 |
5 | import os
6 | import pytest
7 | from pydo import Client
8 |
9 |
10 | pytestmark = pytest.mark.real_billing
11 |
12 |
13 | def test_billing_get_balance(integration_client: Client):
14 | """Testing billing's GET customer balance of billing."""
15 |
16 | get_resp = integration_client.balance.get()
17 |
18 | assert get_resp["account_balance"] == "0.00"
19 |
20 |
21 | def test_billing_list_history(integration_client: Client):
22 | """Testing listing billing history."""
23 |
24 | get_resp = integration_client.billing_history.list()
25 |
26 | assert (
27 | get_resp["billing_history"][0]["type"] == "Invoice"
28 | or get_resp["billing_history"][0]["type"] == "Payment"
29 | )
30 |
31 |
32 | def test_billing_list_invoices(integration_client: Client):
33 |     """Testing listing invoices."""
34 | 
35 |     get_resp = integration_client.invoices.list()
36 | 
37 |     assert "invoices" in get_resp.keys()
41 |
42 |
43 | def test_billing_get_invoice_by_uuid(integration_client: Client, invoice_uuid_param):
44 | """Testing GETting invoice by uuid."""
45 |
46 | get_resp = integration_client.invoices.get_by_uuid(invoice_uuid=invoice_uuid_param)
47 |
48 | assert get_resp["billing_history"]["type"] == "Invoice"
49 |
50 |
51 | def test_billing_get_invoice_csv_by_uuid(
52 | integration_client: Client, invoice_uuid_param
53 | ):
54 | """Testing GETting invoice csv by invoice uuid."""
55 |
56 | get_resp = integration_client.invoices.get_csv_by_uuid(
57 | invoice_uuid=invoice_uuid_param
58 | )
59 |
60 | assert "product,group_description," in get_resp
61 |
62 |
63 | def test_billing_get_invoice_pdf_by_uuid(
64 | integration_client: Client, invoice_uuid_param
65 | ):
66 | """Testing GETting invoice pdf by invoice uuid."""
67 |
68 | get_resp = integration_client.invoices.get_pdf_by_uuid(
69 | invoice_uuid=invoice_uuid_param
70 | )
71 |
72 | pdf_bytes = list(get_resp)[0]
73 |
74 | with open("tests/integration/invoice.pdf", "w", encoding="utf8") as file:
75 | file.write(str(pdf_bytes))
77 |
78 | assert os.path.getsize("tests/integration/invoice.pdf") > 0
79 | os.remove("tests/integration/invoice.pdf")
80 |
81 |
82 | def test_billing_get_invoice_summary_by_uuid(
83 | integration_client: Client, invoice_uuid_param
84 | ):
85 | """Testing GETting invoice summary by uuid."""
86 |
87 | get_resp = integration_client.invoices.get_summary_by_uuid(
88 | invoice_uuid=invoice_uuid_param
89 | )
90 |
91 | assert get_resp["user_company"] == "DigitalOcean"
92 |
--------------------------------------------------------------------------------
/tests/integration/test_block_storage.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 | """ test_block_storage.py
3 | Integration tests for block storage.
4 | """
5 |
6 | import uuid
7 |
8 | from tests.integration import defaults
9 | from tests.integration import shared
10 | from pydo import Client
11 |
12 |
13 | def test_block_storage_snapshots(integration_client: Client):
14 | """Tests listing, retrieving, and deleting a block storage snapshot."""
15 |
16 | volume_req = {
17 | "size_gigabytes": 10,
18 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
19 | "description": "Snapshots testing",
20 | "region": defaults.REGION,
21 | "filesystem_type": "ext4",
22 | }
23 |
24 | with shared.with_test_volume(integration_client, **volume_req) as volume:
25 | vol_id = volume["volume"]["id"]
26 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
27 |
28 | # create snapshot from volume
29 | vol_attach_resp = integration_client.volume_snapshots.create(
30 | volume_id=vol_id, body={"name": expected_name}
31 | )
32 | assert vol_attach_resp["snapshot"]["name"] == expected_name
33 | snap_id = vol_attach_resp["snapshot"]["id"]
34 |
35 | # list snapshots for a volume
36 | list_resp = integration_client.volume_snapshots.list(volume_id=vol_id)
37 | assert len(list_resp["snapshots"]) > 0
38 |
39 | # get an existing snapshot of a volume
40 | get_resp = integration_client.volume_snapshots.get_by_id(snapshot_id=snap_id)
41 | assert get_resp["snapshot"]["name"] == expected_name
42 |
43 | # delete a volume snapshot
44 | delete_resp = integration_client.volume_snapshots.delete_by_id(
45 | snapshot_id=snap_id
46 | )
47 | assert delete_resp is None
48 |
49 |
50 | def test_block_storage(integration_client: Client):
51 | """Tests listing, retrieving, and deleting a block storage."""
52 |
53 | volume_req = {
54 | "size_gigabytes": 10,
55 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
56 | "description": "Snapshots testing",
57 | "region": defaults.REGION,
58 | "filesystem_type": "ext4",
59 | }
60 |
61 | # create volume
62 | volume = integration_client.volumes.create(body=volume_req)
63 | volume_id = volume["volume"]["id"] or ""
64 | assert volume_id != ""
65 |
66 | # list volumes
67 | list_resp = integration_client.volumes.list()
68 | assert len(list_resp["volumes"]) > 0
69 |
70 | # get an existing volume
71 | get_resp = integration_client.volumes.get(volume_id=volume_id)
72 | assert get_resp["volume"]["name"] == volume["volume"]["name"]
73 |
74 | # delete volume by name
75 | delete_resp = integration_client.volumes.delete_by_name(
76 | name=volume["volume"]["name"], region=defaults.REGION
77 | )
78 | assert delete_resp is None
79 |
--------------------------------------------------------------------------------
/tests/integration/test_cdn.py:
--------------------------------------------------------------------------------
1 | """ test_cdn.py
2 | Integration tests for CDN
3 | """
4 |
5 | from tests.integration import shared
6 | from pydo import Client
7 |
8 |
9 | def test_cdn_lifecycle(integration_client: Client, spaces_endpoint: str):
10 | """Tests the complete lifecycle of a CDN
11 | Creates, Lists, Gets, Updates, Deletes, Purges.
12 | """
13 |
14 | cdn_req = {"origin": spaces_endpoint, "ttl": 3600}
15 |
16 | with shared.with_test_cdn(integration_client, cdn_req) as cdn:
17 | cdn_id = cdn["endpoint"]["id"]
18 |
19 | list_resp = integration_client.cdn.list_endpoints()
20 |
21 | assert cdn_id in [endpoints["id"] for endpoints in list_resp["endpoints"]]
22 |
23 | get_resp = integration_client.cdn.get_endpoint(cdn_id)
24 |
25 | assert cdn_id == get_resp["endpoint"]["id"]
26 |
27 | ttl = 86400
28 | update_req = {"ttl": ttl}
29 |
30 | update_resp = integration_client.cdn.update_endpoints(cdn_id, update_req)
31 |
32 | assert update_resp["endpoint"]["ttl"] == ttl
33 |
34 | purge_req = {"files": ["*"]}
35 |
36 | purge_resp = integration_client.cdn.purge_cache(cdn_id, purge_req)
37 |
38 | assert purge_resp is None
39 |
--------------------------------------------------------------------------------
/tests/integration/test_container_registry.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code,line-too-long,too-many-locals
2 | """ test_container_registry.py
3 | Integration tests for container registry.
4 | """
5 |
6 | import uuid
7 |
8 | from tests.integration import defaults
9 | from pydo import Client
10 |
11 |
12 | def test_container_registry(integration_client: Client):
13 | """Tests listing, retrieving, and deleting a container registry."""
14 |
15 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
16 | container_registry_req = {
17 | "name": expected_name,
18 | "subscription_tier_slug": "basic",
19 | "region": defaults.REGION,
20 | }
21 |
22 | # delete a registry
23 | delete_resp = integration_client.registry.delete()
24 | assert delete_resp is None
25 |
26 | # create a registry
27 | container_registry_resp = integration_client.registry.create(
28 | body=container_registry_req
29 | )
30 | assert container_registry_resp["registry"]["name"] == expected_name
31 | registry_name = container_registry_resp["registry"]["name"]
32 |
33 | # get docker credentials
34 | get_docker_resp = integration_client.registry.get_docker_credentials(
35 | read_write=True
36 | )
37 | assert get_docker_resp["auths"]["registry.digitalocean.com"]["auth"] is not None
38 |
39 | # get a registry
40 | get_resp = integration_client.registry.get()
41 | assert get_resp["registry"]["name"] == expected_name
42 | registry_name = get_resp["registry"]["name"]
43 |
44 | # get subscription information
45 | get_sub_resp = integration_client.registry.get_subscription()
46 | assert get_sub_resp["subscription"]["tier"]["slug"] == "basic"
47 |
48 | # update subscription tier
49 | update_sub_resp = integration_client.registry.update_subscription(
50 | {"tier_slug": "starter"}
51 | )
52 | assert update_sub_resp["subscription"]["tier"]["slug"] == "starter"
53 |
54 | # validate container registry name
55 | new_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
56 | validate_name = integration_client.registry.validate_name({"name": new_name})
57 | assert validate_name is None
58 |
59 | # start a garbage collection
60 | garbage = integration_client.registry.run_garbage_collection(
61 | registry_name=registry_name
62 | )
63 | assert garbage["garbage_collection"]["status"] == "requested"
64 |
65 | # get active garbage collection
66 | garbage_active = integration_client.registry.get_garbage_collection(
67 | registry_name=registry_name
68 | )
69 | assert garbage_active["garbage_collection"]["registry_name"] == registry_name
70 |
71 | # list garbage collection
72 | garbage_list = integration_client.registry.list_garbage_collections(
73 | registry_name=registry_name
74 | )
75 | assert len(garbage_list["garbage_collections"]) > 0
76 |
77 | # list registry options
78 | registry_list_options = integration_client.registry.get_options()
79 | assert len(registry_list_options["options"]["available_regions"]) > 0
80 |
81 | # delete a registry
82 | delete_resp = integration_client.registry.delete()
83 | assert delete_resp is None
84 |
--------------------------------------------------------------------------------
/tests/integration/test_databases.py:
--------------------------------------------------------------------------------
1 | """ test_databases.py
2 | Integration Test for Databases
3 | """
4 |
5 | import uuid
6 |
7 | import pytest
8 | from pydo import Client
9 | from tests.integration import defaults, shared
10 |
11 |
12 | @pytest.mark.long_running
13 | def test_databases_update_connection_pool(integration_client: Client):
14 | """Tests updating the connection pool for a database (PostgreSQL).
15 |
16 | Creates a database cluster and waits for its status to be `active`.
17 | Then creates a connection pool.
18 | Then updates the connection pool.
19 | The cluster gets deleted when complete.
20 | """
21 |
22 | db_create_req = {
23 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
24 | "engine": "pg",
25 | "version": "14",
26 | "region": "nyc3",
27 | "size": "db-s-2vcpu-4gb",
28 | "num_nodes": 2,
29 | "tags": ["production"],
30 | }
31 |
32 | with shared.with_test_database(
33 | integration_client, wait=True, **db_create_req
34 | ) as database_resp:
35 | db_id = database_resp["database"]["id"]
36 |
37 | create_pool_req = {
38 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
39 | "mode": "transaction",
40 | "size": 10,
41 | "db": "defaultdb",
42 | "user": "doadmin",
43 | }
44 |
45 | pool_resp = integration_client.databases.add_connection_pool(
46 | db_id, create_pool_req
47 | )
48 |
49 | assert pool_resp is not None
50 | assert "pool" in pool_resp.keys()
51 | pool_name = pool_resp["pool"]["name"]
52 |
53 | new_pool_mode = "session"
54 | new_pool_size = 15
55 |
56 | update_pool_resp = integration_client.databases.update_connection_pool(
57 | db_id,
58 | pool_name,
59 | {
60 | "mode": new_pool_mode,
61 | "size": new_pool_size,
62 | "db": "defaultdb",
63 | "user": "doadmin",
64 | },
65 | )
66 |
67 | assert update_pool_resp is None
68 |
69 | pool_details = integration_client.databases.get_connection_pool(
70 | db_id, pool_name
71 | )
72 |
73 | assert pool_details["pool"]["mode"] == new_pool_mode
74 | assert pool_details["pool"]["size"] == new_pool_size
75 |
76 |
77 | @pytest.mark.long_running
78 | def test_databases_update_major_version(integration_client: Client):
79 |     """Tests updating the database's major version.
80 |
81 | Creates a database cluster and waits for its status to be `active`.
82 | Then updates the cluster.
83 | """
84 |
85 | db_create_req = {
86 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
87 | "engine": "pg",
88 | "version": "13",
89 | "region": "nyc3",
90 | "size": "db-s-2vcpu-4gb",
91 | "num_nodes": 2,
92 | "tags": ["production"],
93 | }
94 |
95 | with shared.with_test_database(
96 | integration_client, wait=True, **db_create_req
97 | ) as database_resp:
98 | db_id = database_resp["database"]["id"]
99 |
100 | update_req = {
101 | "version": "14",
102 | }
103 |
104 | update_resp = integration_client.databases.update_major_version(
105 | db_id, update_req
106 | )
107 |
108 | assert update_resp is None
109 |
110 |
111 | @pytest.mark.long_running
112 | def test_databases_create_replica_and_promote_as_primary(integration_client: Client):
113 | """Tests creating a replica of a database and promoting the
114 | replica as the primary database.
115 | """
116 |
117 | db_create_req = {
118 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
119 | "engine": "pg",
120 | "version": "13",
121 | "region": "nyc3",
122 | "size": "db-s-2vcpu-4gb",
123 | "num_nodes": 2,
124 | "tags": ["production"],
125 | }
126 |
127 | with shared.with_test_database(
128 | integration_client, wait=True, **db_create_req
129 | ) as database_resp:
130 | db_id = database_resp["database"]["id"]
131 | replica_name = "read-nyc3-01"
132 |
133 | create_replica = {
134 | "name": replica_name,
135 | "region": "nyc3",
136 | "size": "db-s-2vcpu-4gb",
137 | }
138 |
139 | create_rep_response = integration_client.databases.create_replica(
140 | db_id, create_replica
141 | )
142 |
143 | assert create_rep_response is not None
144 |
145 | promote_replica = integration_client.databases.promote_replica(
146 | db_id, replica_name
147 | )
148 |
149 | assert promote_replica is None
150 |
--------------------------------------------------------------------------------
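
Note: `shared.with_test_database` above comes from tests/integration/shared.py, which this listing does not inline. The sketch below shows the create/yield/delete context-manager pattern it presumably follows; the pydo operation names (`create_cluster`, `get_cluster`, `destroy_cluster`), the polling interval, and the `"active"` status check (taken from the docstrings above) are assumptions, not a copy of the real helper.

```python
# Hypothetical sketch of a with_test_database helper; the real one in
# tests/integration/shared.py may differ in names and behavior.
import time
from contextlib import contextmanager

from pydo import Client


@contextmanager
def with_test_database(client: Client, wait: bool = False, **kwargs):
    create_resp = client.databases.create_cluster(body=kwargs)
    db_id = create_resp["database"]["id"]
    try:
        if wait:
            # Poll until the cluster reports the status the tests wait for.
            while client.databases.get_cluster(db_id)["database"]["status"] != "active":
                time.sleep(30)
        yield create_resp
    finally:
        # Always clean up the cluster, even if the test body raised.
        client.databases.destroy_cluster(db_id)
```
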
/tests/integration/test_domains.py:
--------------------------------------------------------------------------------
1 | """ test_domains.py
2 | Integration Test for Domains
3 | """
4 |
5 | import uuid
6 |
7 | from tests.integration import defaults
8 | from tests.integration import shared
9 | from pydo import Client
10 |
11 |
12 | def test_domains_create_record(integration_client: Client):
13 |     """Tests creating a domain and then creating
14 |     a record on the new domain.
15 |     """
16 | name = f"{defaults.PREFIX}{uuid.uuid4()}.com"
17 | create_domain = {"name": name}
18 |
19 | with shared.with_test_domain(integration_client, create_domain) as domain:
20 | list_resp = integration_client.domains.list()
21 | assert len(list_resp["domains"]) > 0
22 |
23 | get_resp = integration_client.domains.get(domain["domain"]["name"])
24 | assert name == get_resp["domain"]["name"]
25 |
26 | create_record = {
27 | "type": "A",
28 | "name": name,
29 | "data": "162.10.66.0",
30 | "priority": None,
31 | "port": None,
32 | "ttl": 1800,
33 | "weight": None,
34 | "flags": None,
35 | "tag": None,
36 | }
37 |
38 | with shared.with_test_domain_record(
39 | integration_client, name, create_record
40 | ) as record:
41 | list_resp = integration_client.domains.list_records(name)
42 | assert len(list_resp["domain_records"]) > 0
43 |
44 | record_id = record["domain_record"]["id"]
45 | get_resp = integration_client.domains.get_record(name, record_id)
46 | assert get_resp["domain_record"]["id"] == record_id
47 |
48 | ttl = 900
49 | patch_request = {"type": "A", "ttl": ttl}
50 |
51 | patch_resp = integration_client.domains.patch_record(
52 | name, record_id, patch_request
53 | )
54 | assert patch_resp["domain_record"]["ttl"] == ttl
55 |
56 | ttl = 1000
57 | update_request = {"type": "A", "ttl": ttl}
58 |
59 | update_resp = integration_client.domains.update_record(
60 | name, record_id, update_request
61 | )
62 | assert update_resp["domain_record"]["ttl"] == ttl
63 |
--------------------------------------------------------------------------------
/tests/integration/test_droplets.py:
--------------------------------------------------------------------------------
1 | """ test_droplets.py
2 | Integration tests for droplets.
3 | """
4 |
5 | import uuid
6 |
7 | from tests.integration import defaults
8 | from tests.integration import shared
9 | from pydo import Client
10 |
11 |
12 | def test_droplet_attach_volume(integration_client: Client, public_key: bytes):
13 | """Tests attaching a volume to a droplet.
14 |
15 | Creates a droplet and waits for its status to be `active`.
16 | Then creates a volume.
17 | Then attaches the volume to the droplet and waits for the create action
18 | to complete.
19 | Then, detaches the volume.
20 | """
21 | droplet_req = {
22 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
23 | "region": defaults.REGION,
24 | "size": defaults.DROPLET_SIZE,
25 | "image": defaults.DROPLET_IMAGE,
26 | }
27 |
28 | with shared.with_test_droplet(
29 | integration_client, public_key, **droplet_req
30 | ) as droplet:
31 | shared.wait_for_action(integration_client, droplet["links"]["actions"][0]["id"])
32 | droplet_get_resp = integration_client.droplets.get(droplet["droplet"]["id"])
33 | assert droplet_get_resp["droplet"]["status"] == "active"
34 |
35 | volume_req = {
36 | "size_gigabytes": 10,
37 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
38 | "description": "Block storage testing",
39 | "region": defaults.REGION,
40 | "filesystem_type": "ext4",
41 | }
42 |
43 | with shared.with_test_volume(integration_client, **volume_req) as volume:
44 |
45 | vol_attach_resp = integration_client.volume_actions.post_by_id(
46 | volume["volume"]["id"],
47 | {"type": "attach", "droplet_id": droplet["droplet"]["id"]},
48 | )
49 | shared.wait_for_action(integration_client, vol_attach_resp["action"]["id"])
50 | droplet_get_resp = integration_client.droplets.get(droplet["droplet"]["id"])
51 | assert (
52 | vol_attach_resp["volume"]["id"]
53 | in droplet_get_resp["droplet"]["volume_ids"]
54 | )
55 |
56 |             vol_detach_resp = integration_client.volume_actions.post_by_id(
57 |                 volume["volume"]["id"],
58 |                 {"type": "detach", "droplet_id": droplet["droplet"]["id"]},
59 |             )
60 |             shared.wait_for_action(integration_client, vol_detach_resp["action"]["id"])
61 |             droplet_get_resp = integration_client.droplets.get(droplet["droplet"]["id"])
62 |             assert (
63 |                 vol_attach_resp["volume"]["id"]
64 |                 not in droplet_get_resp["droplet"]["volume_ids"]
65 |             )
66 |
--------------------------------------------------------------------------------
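
Note: `shared.wait_for_action`, used throughout these integration tests, also lives in tests/integration/shared.py and is not inlined here. A plausible minimal implementation is sketched below; it assumes only `client.actions.get` (exercised by the mocked tests later in this listing) and the `in-progress`/`completed`/`errored` action statuses returned by the API.

```python
# Hypothetical sketch of shared.wait_for_action; the real helper may differ.
import time

from pydo import Client


def wait_for_action(client: Client, action_id: int, wait_seconds: int = 5):
    """Block until the action leaves "in-progress", sleeping between polls."""
    while True:
        status = client.actions.get(action_id)["action"]["status"]
        if status == "completed":
            return
        if status == "errored":
            raise RuntimeError(f"Action {action_id} errored")
        time.sleep(wait_seconds)
```
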
/tests/integration/test_firewalls.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 |
3 | """ test_firewalls.py
4 | Integration tests for firewalls.
5 | """
6 |
7 | import uuid
8 |
9 | from tests.integration import defaults, shared
10 | from pydo import Client
11 |
12 |
13 | def test_firewalls(integration_client: Client):
14 | """Tests creating, updating, and deleting a firewall"""
15 | tag_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
16 | firewall_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
17 | tag_body = {"name": tag_name}
18 |     firewall_id = None  # set before the try so the finally clause can't hit a NameError
19 | with shared.with_test_tag(integration_client, **tag_body):
20 | try:
21 | create_req = {
22 | "name": firewall_name,
23 | "outbound_rules": [
24 | {
25 | "protocol": "tcp",
26 | "ports": "80",
27 | "destinations": {"addresses": ["0.0.0.0/0", "::/0"]},
28 | }
29 | ],
30 | "tags": [tag_name],
31 | }
32 |
33 | # Create firewall
34 | firewall = integration_client.firewalls.create(body=create_req)
35 | firewall_id = firewall["firewall"]["id"]
36 | assert firewall_id is not None
37 | assert firewall["firewall"]["name"] == firewall_name
38 | assert firewall["firewall"]["tags"] == [tag_name]
39 | assert firewall["firewall"]["outbound_rules"][0]["protocol"] == "tcp"
40 | assert firewall["firewall"]["outbound_rules"][0]["ports"] == "80"
41 | assert firewall["firewall"]["outbound_rules"][0]["destinations"] == {
42 | "addresses": ["0.0.0.0/0", "::/0"]
43 | }
44 |
45 | # GET firewall
46 | got = integration_client.firewalls.get(firewall_id=firewall_id)
47 | assert firewall_id == got["firewall"]["id"]
48 | assert got["firewall"]["name"] == firewall_name
49 | assert got["firewall"]["tags"] == [tag_name]
50 | assert got["firewall"]["outbound_rules"][0]["protocol"] == "tcp"
51 | assert got["firewall"]["outbound_rules"][0]["ports"] == "80"
52 | assert got["firewall"]["outbound_rules"][0]["destinations"] == {
53 | "addresses": ["0.0.0.0/0", "::/0"]
54 | }
55 |
56 | # Add rule
57 | rule = {
58 | "inbound_rules": [
59 | {
60 | "protocol": "tcp",
61 | "ports": "2222",
62 | "sources": {"addresses": ["0.0.0.0/0", "::/0"]},
63 | }
64 | ]
65 | }
66 | integration_client.firewalls.add_rules(firewall_id=firewall_id, body=rule)
67 | updated = integration_client.firewalls.get(firewall_id=firewall_id)
68 | assert updated["firewall"]["inbound_rules"][0]["protocol"] == "tcp"
69 | assert updated["firewall"]["inbound_rules"][0]["ports"] == "2222"
70 | assert updated["firewall"]["inbound_rules"][0]["sources"] == {
71 | "addresses": ["0.0.0.0/0", "::/0"]
72 | }
73 |
74 | # Remove rule
75 | remove = {
76 | "outbound_rules": [
77 | {
78 | "protocol": "tcp",
79 | "ports": "80",
80 | "destinations": {"addresses": ["0.0.0.0/0", "::/0"]},
81 | }
82 | ]
83 | }
84 | integration_client.firewalls.delete_rules(
85 | firewall_id=firewall_id, body=remove
86 | )
87 | removed = integration_client.firewalls.get(firewall_id=firewall_id)
88 | assert len(removed["firewall"]["outbound_rules"]) == 0
89 |
90 | finally:
91 | # Delete firewall
92 | if firewall_id is not None:
93 | integration_client.firewalls.delete(firewall_id=firewall_id)
94 |
--------------------------------------------------------------------------------
/tests/integration/test_images.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=line-too-long
2 | """ test_images.py
3 | Integration tests for images.
4 | """
5 |
6 | import uuid
7 |
8 | from tests.integration import defaults
9 | from pydo import Client
10 |
11 |
12 | def test_images(integration_client: Client):
13 |     """Tests creating, listing, retrieving, updating, and deleting an image."""
14 |
15 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
16 | image_req = {
17 | "name": expected_name,
18 | "url": "http://cloud-images.ubuntu.com/minimal/releases/bionic/release/ubuntu-18.04-minimal-cloudimg-amd64.img",
19 | "distribution": "Ubuntu",
20 | "region": "nyc3",
21 | "description": "Cloud-optimized image w/ small footprint",
22 | "tags": ["base-image", "prod"],
23 | }
24 |
25 | image_resp = integration_client.images.create_custom(body=image_req)
26 | assert image_resp["image"]["name"] == expected_name
27 | image_id = image_resp["image"]["id"]
28 |
29 | # list all images
30 | list_resp = integration_client.images.list()
31 |     assert len(list_resp["images"]) > 0
32 |
33 | # list all images with prod tag
34 | list_resp = integration_client.images.list(tag_name="prod")
35 | assert len(list_resp) > 0
36 | assert list_resp["images"][0]["tag_name"] == "prod"
37 |
38 | # get an image
39 | get_resp = integration_client.images.get(image_id=image_id)
40 | assert get_resp["image"]["name"] == expected_name
41 |
42 | # update an image
43 | new_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
44 | update_req = {"name": new_name, "distribution": "Ubuntu", "description": " "}
45 | update_resp = integration_client.images.update(body=update_req, image_id=image_id)
46 | assert update_resp["image"]["name"] == new_name
47 |
48 |     # delete the image
49 | delete_resp = integration_client.images.delete(image_id=image_id)
50 | assert delete_resp is None
51 |
--------------------------------------------------------------------------------
/tests/integration/test_load_balancers.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 |
3 | """ test_load_balancers.py
4 | Integration tests for load balancers.
5 | """
6 |
7 | import uuid
8 |
9 | from tests.integration import defaults, shared
10 | from pydo import Client
11 |
12 |
13 | def test_load_balancers_tag(integration_client: Client, public_key: bytes):
14 | """
15 | Tests creating a load balancer with a Droplet assigned via a tag, updating
16 | it, and deleting it.
17 | """
18 | tag_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
19 | droplet_req = {
20 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
21 | "region": defaults.REGION,
22 | "size": defaults.DROPLET_SIZE,
23 | "image": defaults.DROPLET_IMAGE,
24 | "tags": [tag_name],
25 | }
26 |
27 | with shared.with_test_tag(integration_client, **{"name": tag_name}):
28 | with shared.with_test_droplet(
29 | integration_client, public_key, **droplet_req
30 | ) as droplet:
31 | # Ensure Droplet create is finished before proceeding
32 | shared.wait_for_action(
33 | integration_client, droplet["links"]["actions"][0]["id"]
34 | )
35 |
36 | lb_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
37 | lb_create = {
38 | "name": lb_name,
39 | "region": defaults.REGION,
40 | "forwarding_rules": [
41 | {
42 | "entry_protocol": "tcp",
43 | "entry_port": 80,
44 | "target_protocol": "tcp",
45 | "target_port": 80,
46 | }
47 | ],
48 | "tag": tag_name,
49 | }
50 |
51 | with shared.with_test_load_balancer(
52 | integration_client, lb_create, wait=True
53 | ) as new_lb:
54 | lbid = new_lb["load_balancer"]["id"]
55 | assert lbid is not None
56 | assert new_lb["load_balancer"]["name"] == lb_name
57 | assert new_lb["load_balancer"]["region"]["slug"] == defaults.REGION
58 | assert new_lb["load_balancer"]["tag"] == tag_name
59 | assert new_lb["load_balancer"]["droplet_ids"] == [
60 | droplet["droplet"]["id"]
61 | ]
62 | assert (
63 | new_lb["load_balancer"]["forwarding_rules"][0]["entry_protocol"]
64 | == "tcp"
65 | )
66 | assert (
67 | new_lb["load_balancer"]["forwarding_rules"][0]["entry_port"] == 80
68 | )
69 | assert (
70 | new_lb["load_balancer"]["forwarding_rules"][0]["target_protocol"]
71 | == "tcp"
72 | )
73 | assert (
74 |                     new_lb["load_balancer"]["forwarding_rules"][0]["target_port"] == 80
75 | )
76 | assert new_lb["load_balancer"]["health_check"]["protocol"] == "http"
77 | assert new_lb["load_balancer"]["health_check"]["port"] == 80
78 |
79 | # Update the load balancer customizing the health check
80 | updated_lb = integration_client.load_balancers.update(
81 | lb_id=lbid,
82 | body={
83 | "name": lb_name,
84 | "region": defaults.REGION,
85 | "forwarding_rules": [
86 | {
87 | "entry_protocol": "tcp",
88 | "entry_port": 80,
89 | "target_protocol": "tcp",
90 | "target_port": 8080,
91 | }
92 | ],
93 | "tag": tag_name,
94 | "health_check": {
95 | "protocol": "http",
96 | "port": 8080,
97 | "path": "/",
98 | "check_interval_seconds": 10,
99 | "response_timeout_seconds": 5,
100 | "healthy_threshold": 5,
101 | "unhealthy_threshold": 3,
102 | },
103 | },
104 | )
105 | assert (
106 | updated_lb["load_balancer"]["forwarding_rules"][0]["target_port"]
107 | == 8080
108 | )
109 | assert updated_lb["load_balancer"]["health_check"]["port"] == 8080
110 |
111 | # Add a forwarding rule using the forwarding_rules endpoint
112 | rule = {
113 | "forwarding_rules": [
114 | {
115 | "entry_protocol": "udp",
116 | "entry_port": 194,
117 | "target_protocol": "udp",
118 | "target_port": 194,
119 | }
120 | ]
121 | }
122 | integration_client.load_balancers.add_forwarding_rules(
123 | lb_id=lbid, body=rule
124 | )
125 | got_lb = integration_client.load_balancers.get(lb_id=lbid)
126 | assert len(got_lb["load_balancer"]["forwarding_rules"]) == 2
127 | assert (
128 | got_lb["load_balancer"]["forwarding_rules"][1]["entry_protocol"]
129 | == "udp"
130 | )
131 | assert (
132 | got_lb["load_balancer"]["forwarding_rules"][1]["entry_port"] == 194
133 | )
134 | assert (
135 | got_lb["load_balancer"]["forwarding_rules"][1]["target_protocol"]
136 | == "udp"
137 | )
138 | assert (
139 | got_lb["load_balancer"]["forwarding_rules"][1]["target_port"] == 194
140 | )
141 |
142 | # Remove a forwarding rule using the forwarding_rules endpoint
143 | integration_client.load_balancers.remove_forwarding_rules(
144 | lb_id=lbid, body=rule
145 | )
146 | got_lb = integration_client.load_balancers.get(lb_id=lbid)
147 | assert len(got_lb["load_balancer"]["forwarding_rules"]) == 1
148 | assert (
149 | got_lb["load_balancer"]["forwarding_rules"][0]["entry_protocol"]
150 | == "tcp"
151 | )
152 | assert (
153 | got_lb["load_balancer"]["forwarding_rules"][0]["entry_port"] == 80
154 | )
155 | assert (
156 | got_lb["load_balancer"]["forwarding_rules"][0]["target_protocol"]
157 | == "tcp"
158 | )
159 | assert (
160 |                     got_lb["load_balancer"]["forwarding_rules"][0]["target_port"] == 8080
161 | )
162 |
163 |
164 | def test_load_balancers_droplets(integration_client: Client, public_key: bytes):
165 | """
166 | Tests creating a load balancer exercising the add and remove Droplet
167 | endpoints.
168 | """
169 | tag_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
170 | droplet_req = {
171 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
172 | "region": defaults.REGION,
173 | "size": defaults.DROPLET_SIZE,
174 | "image": defaults.DROPLET_IMAGE,
175 | "tags": [tag_name],
176 | }
177 |
178 | with shared.with_test_tag(integration_client, **{"name": tag_name}):
179 | with shared.with_test_droplet(
180 | integration_client, public_key, **droplet_req
181 | ) as droplet:
182 | # Ensure Droplet create is finished before proceeding
183 | shared.wait_for_action(
184 | integration_client, droplet["links"]["actions"][0]["id"]
185 | )
186 |
187 | lb_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
188 | lb_create = {
189 | "name": lb_name,
190 | "region": defaults.REGION,
191 | "forwarding_rules": [
192 | {
193 | "entry_protocol": "tcp",
194 | "entry_port": 80,
195 | "target_protocol": "tcp",
196 | "target_port": 80,
197 | }
198 | ],
199 | }
200 |
201 | with shared.with_test_load_balancer(
202 | integration_client, wait=True, body=lb_create
203 | ) as new_lb:
204 | lbid = new_lb["load_balancer"]["id"]
205 | assert lbid is not None
206 | assert new_lb["load_balancer"]["name"] == lb_name
207 | assert new_lb["load_balancer"]["tag"] == ""
208 | assert new_lb["load_balancer"]["droplet_ids"] == []
209 |
210 | # Add Droplet
211 | droplet_ids = {"droplet_ids": [droplet["droplet"]["id"]]}
212 | integration_client.load_balancers.add_droplets(
213 | lb_id=lbid, body=droplet_ids
214 | )
215 | got_lb = integration_client.load_balancers.get(lb_id=lbid)
216 | assert got_lb["load_balancer"]["droplet_ids"] == [
217 | droplet["droplet"]["id"]
218 | ]
219 |
220 | # Remove Droplet
221 | integration_client.load_balancers.remove_droplets(
222 | lb_id=lbid, body=droplet_ids
223 | )
224 | got_lb = integration_client.load_balancers.get(lb_id=lbid)
225 | assert got_lb["load_balancer"]["droplet_ids"] == []
226 |
--------------------------------------------------------------------------------
/tests/integration/test_monitoring.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=too-many-locals
2 |
3 | """ test_monitoring.py
4 | Integration Tests for Monitoring
5 | """
6 |
7 | import uuid
8 |
9 | from pydo import Client
10 | from tests.integration import defaults, shared
11 |
12 |
13 | def test_monitoring_alert_policies(integration_client: Client, public_key: bytes):
14 | """Tests creating, listing, getting, updating, and deleting an alert policy"""
15 |
16 | test_droplet_req = {
17 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
18 | "region": defaults.REGION,
19 | "size": defaults.DROPLET_SIZE,
20 | "image": defaults.DROPLET_IMAGE,
21 | "tags": ["cg_test_tag"],
22 | }
23 |
24 | with shared.with_test_droplet(
25 | integration_client, public_key, **test_droplet_req
26 | ) as droplet:
27 | shared.wait_for_action(integration_client, droplet["links"]["actions"][0]["id"])
28 | droplet_get_resp = integration_client.droplets.get(droplet["droplet"]["id"])
29 | assert droplet_get_resp["droplet"]["status"] == "active"
30 | droplet_id = droplet_get_resp["droplet"]["id"]
31 |
32 | create_alert_req = {
33 | "alerts": {
34 |                 "email": ["bob@example.com"],
35 | },
36 | "compare": "GreaterThan",
37 | "description": "CPU Alert",
38 | "enabled": True,
39 | "entities": [str(droplet_id)],
40 | "tags": ["cg_test_tag"],
41 | "type": "v1/insights/droplet/cpu",
42 | "value": 80,
43 | "window": "5m",
44 | }
45 |
46 | create_alert_resp = integration_client.monitoring.create_alert_policy(
47 | body=create_alert_req
48 | )
49 | assert create_alert_resp["policy"]["entities"][0] == str(droplet_id)
50 | alert_uuid = create_alert_resp["policy"]["uuid"]
51 |
52 | # testing listing alert policies
53 | list_alert_policies = integration_client.monitoring.list_alert_policy()
54 | assert len(list_alert_policies["policies"]) > 0
55 |
56 | # testing getting alert policies
57 | get_alert_policies = integration_client.monitoring.get_alert_policy(
58 | alert_uuid=alert_uuid
59 | )
60 | assert get_alert_policies["policy"]["entities"][0] == str(droplet_id)
61 |
62 | update_alert_req = {
63 | "alerts": {
64 |                 "email": ["carl@example.com"],
65 | },
66 | "compare": "GreaterThan",
67 | "description": "CPU Alert",
68 | "enabled": True,
69 | "tags": ["cg_test_tag"],
70 | "type": "v1/insights/droplet/cpu",
71 | "value": 80,
72 | "window": "5m",
73 | }
74 |
75 | # testing updating alert policy
76 | update_alert_policies = integration_client.monitoring.update_alert_policy(
77 | alert_uuid=alert_uuid, body=update_alert_req
78 | )
79 |         assert "carl@example.com" in update_alert_policies["policy"]["alerts"]["email"]
80 |
81 | # testing deleting alert policy
82 | delete_alert_policies = integration_client.monitoring.delete_alert_policy(
83 | alert_uuid=alert_uuid
84 | )
85 | assert delete_alert_policies is None
86 |
87 |
88 | def test_monitoring_metrics(integration_client: Client, public_key: bytes):
89 | """Tests Getting Various Metrics"""
90 |
91 | test_droplet_req = {
92 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
93 | "region": defaults.REGION,
94 | "size": defaults.DROPLET_SIZE,
95 | "image": defaults.DROPLET_IMAGE,
96 | "tags": ["cg_test_tag"],
97 | }
98 |
99 | with shared.with_test_droplet(
100 | integration_client, public_key, **test_droplet_req
101 | ) as droplet:
102 | shared.wait_for_action(integration_client, droplet["links"]["actions"][0]["id"])
103 | droplet_get_resp = integration_client.droplets.get(droplet["droplet"]["id"])
104 | assert droplet_get_resp["droplet"]["status"] == "active"
105 | droplet_id = droplet_get_resp["droplet"]["id"]
106 |
107 | # testing getting droplet bandwidth metrics
108 | bandwidth_metric = integration_client.monitoring.get_droplet_bandwidth_metrics(
109 | host_id=str(droplet_id),
110 | interface="public",
111 | direction="outbound",
112 | start="1620683817",
113 | end="1620705417",
114 | )
115 | assert bandwidth_metric["status"] == "success"
116 |
117 | # testing getting droplet cpu metrics
118 | cpu_metric = integration_client.monitoring.get_droplet_cpu_metrics(
119 | host_id=str(droplet_id), start="1620683817", end="1620705417"
120 | )
121 | assert cpu_metric["status"] == "success"
122 |
123 | # testing getting filesystem free metrics
124 | filesystem_free_metric = (
125 | integration_client.monitoring.get_droplet_filesystem_free_metrics(
126 | host_id=str(droplet_id), start="1620683817", end="1620705417"
127 | )
128 | )
129 | assert filesystem_free_metric["status"] == "success"
130 |
131 | # testing getting load1 metrics
132 | load1_metric = integration_client.monitoring.get_droplet_load1_metrics(
133 | host_id=str(droplet_id), start="1620683817", end="1620705417"
134 | )
135 | assert load1_metric["status"] == "success"
136 |
137 | # testing getting load5 metrics
138 | load5_metric = integration_client.monitoring.get_droplet_load5_metrics(
139 | host_id=str(droplet_id), start="1620683817", end="1620705417"
140 | )
141 | assert load5_metric["status"] == "success"
142 |
143 | # testing getting load15 metrics
144 | load15_metric = integration_client.monitoring.get_droplet_load15_metrics(
145 | host_id=str(droplet_id), start="1620683817", end="1620705417"
146 | )
147 | assert load15_metric["status"] == "success"
148 |
149 | # testing getting droplet memory cached
150 | memory_cached_metric = (
151 | integration_client.monitoring.get_droplet_memory_cached_metrics(
152 | host_id=str(droplet_id), start="1620683817", end="1620705417"
153 | )
154 | )
155 | assert memory_cached_metric["status"] == "success"
156 |
157 | # testing getting droplet free memory
158 | memory_free_metric = (
159 | integration_client.monitoring.get_droplet_memory_free_metrics(
160 | host_id=str(droplet_id), start="1620683817", end="1620705417"
161 | )
162 | )
163 | assert memory_free_metric["status"] == "success"
164 |
165 | # testing getting droplet total memory
166 | memory_total_metric = (
167 | integration_client.monitoring.get_droplet_memory_total_metrics(
168 | host_id=str(droplet_id), start="1620683817", end="1620705417"
169 | )
170 | )
171 | assert memory_total_metric["status"] == "success"
172 |
173 | # testing getting droplet available memory
174 | memory_available_metric = (
175 | integration_client.monitoring.get_droplet_memory_available_metrics(
176 | host_id=str(droplet_id), start="1620683817", end="1620705417"
177 | )
178 | )
179 | assert memory_available_metric["status"] == "success"
180 |
--------------------------------------------------------------------------------
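
Note: the `start`/`end` values passed to the metrics calls above are hardcoded epoch seconds from May 2021. Against a live account, a window that covers the Droplet's actual lifetime is usually what you want; a stdlib-only sketch of deriving one (the six-hour width is an arbitrary choice):

```python
# Sketch: build a "last six hours" metrics window as epoch-second strings,
# in place of the fixed 1620683817/1620705417 pair used in the tests above.
import time

end = int(time.time())
start = end - 6 * 60 * 60
window = {"start": str(start), "end": str(end)}

# For example:
# integration_client.monitoring.get_droplet_cpu_metrics(
#     host_id=str(droplet_id), **window
# )
```
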
/tests/integration/test_one_clicks.py:
--------------------------------------------------------------------------------
1 | """ Integration Test for 1-clicks"""
2 |
3 | import uuid
4 | import pytest
5 | from pydo import Client
6 |
7 | from tests.integration import defaults, shared
8 |
9 |
10 | @pytest.mark.parametrize(
11 | "params,expected_types",
12 | [
13 | ({}, {"droplet", "kubernetes"}),
14 | ({"type": "kubernetes"}, {"kubernetes"}),
15 | ({"type": "droplet"}, {"droplet"}),
16 | ({"type": "thisshouldnotmatch"}, None),
17 | ],
18 | )
19 | def test_one_click_list(integration_client: Client, params: dict, expected_types: set):
20 | """Test the one_click list operation"""
21 |
22 | list_resp = integration_client.one_clicks.list(**params)
23 |
24 | assert list_resp is not None
25 | one_clicks = list_resp.get("1_clicks", None)
26 |
27 | if expected_types is None:
28 | assert one_clicks is None
29 | else:
30 | assert one_clicks is not None
31 | assert isinstance(one_clicks, list)
32 | assert len(one_clicks) > 0
33 |
34 | returned_types = {i["type"] for i in one_clicks}
35 | assert returned_types == expected_types
36 |
37 |
38 | def test_one_click_install_kubernetes_app(integration_client: Client):
39 | """Test the one_click install_kubernetes operation
40 |
41 | Waits for the cluster state to be `running`.
42 | Then installs the one_click application.
43 | Then waits for the install action to complete.
44 | """
45 |
46 | cluster_create_req = {
47 | "name": f"{defaults.PREFIX}-cluster-{uuid.uuid4()}",
48 | "region": defaults.REGION,
49 | "version": defaults.K8S_VERSION,
50 | "node_pools": [{"size": defaults.K8S_NODE_SIZE, "count": 2, "name": "workers"}],
51 | }
52 |
53 | with shared.with_test_kubernetes_cluster(
54 | integration_client, **cluster_create_req, wait=True
55 | ) as cluster:
56 | cluster_id = cluster["kubernetes_cluster"]["id"]
57 |
58 | install_req = {
59 | "addon_slugs": ["kube-state-metrics", "loki"],
60 | "cluster_uuid": cluster_id,
61 | }
62 |
63 | install_resp = integration_client.one_clicks.install_kubernetes(install_req)
64 |
65 | assert install_resp is not None
66 | assert install_resp["message"] == "Successfully kicked off addon job."
67 |
--------------------------------------------------------------------------------
/tests/integration/test_projects.py:
--------------------------------------------------------------------------------
1 | """ test_projects.py
2 | Integration Tests for Projects
3 | """
4 |
5 | import uuid
6 |
7 | from pydo import Client
8 | from tests.integration import defaults
9 |
10 |
11 | def test_projects(integration_client: Client):
12 | """Tests creating, updating, and deleting a project"""
13 |
14 | # test creating the project
15 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
16 | create_req = {
17 | "name": expected_name,
18 | "description": "Test project for python client",
19 | "purpose": "testing",
20 | "environment": "Development",
21 | }
22 | create_resp = integration_client.projects.create(body=create_req)
23 | assert create_resp["project"]["name"] == expected_name
24 |
25 | project_id = create_resp["project"]["id"]
26 |
27 | # test getting a project
28 | get_resp = integration_client.projects.get(project_id=project_id)
29 | assert get_resp["project"]["name"] == expected_name
30 |
31 | # test updating a project
32 | updated_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
33 | update_req = {
34 | "name": updated_name,
35 | "description": "Test project for python client",
36 | "purpose": "testing",
37 | "environment": "Development",
38 | "is_default": False,
39 | }
40 | update_resp = integration_client.projects.update(
41 | project_id=project_id, body=update_req
42 | )
43 | assert update_resp["project"]["name"] == updated_name
44 |
45 | # test patching a project
46 | patch_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
47 | patch_req = {
48 | "name": patch_name,
49 | }
50 | patch_resp = integration_client.projects.patch(
51 | project_id=project_id, body=patch_req
52 | )
53 | assert patch_resp["project"]["name"] == patch_name
54 |
55 | # test listing a project
56 | list_resp = integration_client.projects.list()
57 |     # there should always be at least the default project
58 | assert len(list_resp["projects"]) > 0
59 |
60 | # test deleting a project
61 | # Work around endpoint requiring "application/json" for DELETES even though
62 | # there is no request or response body.
63 | custom_headers = {"Content-Type": "application/json"}
64 | delete_resp = integration_client.projects.delete(
65 | headers=custom_headers, project_id=project_id
66 | )
67 | assert delete_resp is None
68 |
69 |
70 | def test_projects_default(integration_client: Client):
71 | """Testing GETting, updating, patching, getting the default Project operation"""
72 |
73 | # test getting the default project
74 | get_resp = integration_client.projects.get_default()
75 | assert get_resp["project"]["is_default"]
76 |
77 | # test updating the default project
78 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
79 | update_req = {
80 | "name": expected_name,
81 | "description": "Test project for python client",
82 | "purpose": "testing",
83 | "environment": "Development",
84 | "is_default": True,
85 | }
86 | update_resp = integration_client.projects.update_default(body=update_req)
87 | assert update_resp["project"]["name"] == expected_name
88 | assert update_resp["project"]["is_default"]
89 |
90 | # test patching the default project
91 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
92 | patch_req = {
93 | "name": expected_name,
94 | }
95 | patch_resp = integration_client.projects.patch_default(body=patch_req)
96 | assert patch_resp["project"]["name"] == expected_name
97 | assert patch_resp["project"]["is_default"]
98 |
--------------------------------------------------------------------------------
/tests/integration/test_regions.py:
--------------------------------------------------------------------------------
1 | """ test_regions.py
2 | Integration Test for Regions
3 | """
4 |
5 | from pydo import Client
6 |
7 |
8 | def test_regions(integration_client: Client):
9 | """Testing the list of regions"""
10 |
11 | list_resp = integration_client.regions.list()
12 |
13 | assert len(list_resp["regions"]) >= 13
14 |
--------------------------------------------------------------------------------
/tests/integration/test_reserved_ips.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 |
3 | """ test_reserved_ips.py
4 | Integration tests for reserved IPs.
5 | """
6 |
7 | import uuid
8 |
9 | from tests.integration import defaults, shared
10 | from pydo import Client
11 |
12 |
13 | def test_reserved_ips(integration_client: Client, public_key: bytes):
14 | """Tests creating a reserved IP, assigning it to a Droplet, and deleting it."""
15 | droplet_req = {
16 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
17 | "region": defaults.REGION,
18 | "size": defaults.DROPLET_SIZE,
19 | "image": defaults.DROPLET_IMAGE,
20 | }
21 |     rip = None  # set before the try so the finally clause can't hit a NameError
22 | with shared.with_test_droplet(
23 | integration_client, public_key, **droplet_req
24 | ) as droplet:
25 | try:
26 | # Create reserved IP
27 | new_rip = integration_client.reserved_ips.create(
28 | body={"region": defaults.REGION}
29 | )
30 | rip = new_rip["reserved_ip"]["ip"]
31 | assert rip is not None
32 | assert new_rip["reserved_ip"]["region"]["slug"] == defaults.REGION
33 | assert new_rip["reserved_ip"]["droplet"] is None
34 |
35 | # Ensure Droplet create is finished to prevent "Droplet already has
36 | # a pending event" error when assigning the reserved IP.
37 | shared.wait_for_action(
38 | integration_client, droplet["links"]["actions"][0]["id"]
39 | )
40 |
41 | # Assign reserved IP to a Droplet
42 | droplet_id = droplet["droplet"]["id"]
43 | assign_action = integration_client.reserved_ips_actions.post(
44 | reserved_ip=rip,
45 | body={"type": "assign", "droplet_id": droplet_id},
46 | )
47 | assert assign_action["action"]["type"] == "assign_ip"
48 | shared.wait_for_action(integration_client, assign_action["action"]["id"])
49 |
50 | assigned_rip = integration_client.reserved_ips.get(reserved_ip=rip)
51 | assert assigned_rip["reserved_ip"]["droplet"]["id"] == droplet_id
52 |
53 | # Unassign reserved IP
54 | unassign_action = integration_client.reserved_ips_actions.post(
55 | reserved_ip=rip,
56 | body={"type": "unassign"},
57 | )
58 | assert unassign_action["action"]["type"] == "unassign_ip"
59 | shared.wait_for_action(integration_client, unassign_action["action"]["id"])
60 |
61 | unassigned_rip = integration_client.reserved_ips.get(reserved_ip=rip)
62 | assert unassigned_rip["reserved_ip"]["droplet"] is None
63 |             assert unassigned_rip["reserved_ip"]["region"]["slug"] == defaults.REGION
64 |
65 | finally:
66 | # Delete reserved IP
67 | if rip is not None:
68 | integration_client.reserved_ips.delete(reserved_ip=rip)
69 |
--------------------------------------------------------------------------------
/tests/integration/test_sizes.py:
--------------------------------------------------------------------------------
1 | """ test_sizes.py
2 | Integration Test for Sizes
3 | """
4 |
5 | import pytest
6 |
7 | from pydo import Client
8 | from pydo.aio import Client as aioClient
9 |
10 |
11 | def test_sizes_list(integration_client: Client):
12 | """Testing the List of the Sizes endpoint"""
13 | list_resp = integration_client.sizes.list()
14 |
15 | assert len(list_resp["sizes"]) >= 20
16 |
17 |
18 | @pytest.mark.asyncio
19 | async def test_sizes_list_async(async_integration_client: aioClient):
20 | """Testing the List of the Sizes endpoint"""
21 | list_resp = await async_integration_client.sizes.list()
22 |
23 | assert len(list_resp["sizes"]) >= 20
24 |
--------------------------------------------------------------------------------
/tests/integration/test_snapshots.py:
--------------------------------------------------------------------------------
1 | """ test_snapshots.py
2 | Integration tests for snapshots.
3 | """
4 |
5 | import uuid
6 |
7 | from tests.integration import defaults
8 | from tests.integration import shared
9 | from pydo import Client
10 |
11 |
12 | def test_volume_snapshots(integration_client: Client):
13 |     """Tests creating, listing, retrieving, and deleting a volume snapshot.
14 | 
15 |     Creates a volume and then creates a snapshot of it.
16 |     Then retrieves the snapshot.
17 |     Then lists all snapshots.
18 |     Then, deletes the snapshot.
19 | """
20 |
21 | volume_req = {
22 | "size_gigabytes": 10,
23 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
24 | "description": "Snapshots testing",
25 | "region": defaults.REGION,
26 | "filesystem_type": "ext4",
27 | }
28 |
29 | with shared.with_test_volume(integration_client, **volume_req) as volume:
30 | vol_id = volume["volume"]["id"]
31 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
32 | vol_attach_resp = integration_client.volume_snapshots.create(
33 | volume_id=vol_id, body={"name": expected_name}
34 | )
35 | assert vol_attach_resp["snapshot"]["name"] == expected_name
36 | snapshot_id = vol_attach_resp["snapshot"]["id"]
37 |
38 | # list all snapshots
39 | list_resp = integration_client.snapshots.list()
40 |         assert len(list_resp["snapshots"]) > 0
41 |
42 | # get a snapshot
43 | get_resp = integration_client.snapshots.get(snapshot_id=snapshot_id)
44 | assert get_resp["snapshot"]["name"] == expected_name
45 |
46 | # delete a snapshot
47 | delete_resp = integration_client.snapshots.delete(snapshot_id=snapshot_id)
48 | assert delete_resp is None
49 |
--------------------------------------------------------------------------------
/tests/integration/test_ssh_keys.py:
--------------------------------------------------------------------------------
1 | """ test_ssh_keys.py
2 | Integration tests for SSH keys.
3 | """
4 |
5 | from tests.integration import shared
6 | from pydo import Client
7 |
8 |
9 | def test_ssh_keys(integration_client: Client, public_key: bytes):
10 | """Tests creating and modifying SSH keys on a live account.
11 |
12 | First, it creates a key using the shared with_ssh_key contextmanager.
13 |     Next, it tests GETting the key both by fingerprint and ID.
14 |     Then, it updates the key's name.
15 | """
16 | client = integration_client
17 | with shared.with_ssh_key(client, public_key) as fingerprint:
18 | get_by_fingerprint = client.ssh_keys.get(fingerprint)
19 | assert get_by_fingerprint["ssh_key"]["fingerprint"] == fingerprint
20 | name = get_by_fingerprint["ssh_key"]["name"]
21 | key_id = get_by_fingerprint["ssh_key"]["id"]
22 |
23 | get_by_id = client.ssh_keys.get(key_id)
24 | assert get_by_id["ssh_key"]["fingerprint"] == fingerprint
25 | assert get_by_id["ssh_key"]["name"] == name
26 |
27 | new_name = name + "-updated"
28 | updated = client.ssh_keys.update(
29 | ssh_key_identifier=key_id, body={"name": new_name}
30 | )
31 | assert updated["ssh_key"]["name"] == new_name
32 |
--------------------------------------------------------------------------------
/tests/integration/test_tags.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 |
3 | """ test_tags.py
4 | Integration tests for tags.
5 | """
6 |
7 | import uuid
8 |
9 | from tests.integration import defaults, shared
10 | from pydo import Client
11 |
12 |
13 | def test_tag_droplet(integration_client: Client, public_key: bytes):
14 | """Tests tagging a Droplet.
15 |
16 | First, it creates a tag.
17 | Then, it creates a Droplet and waits for its status to be `active`.
18 | Next, it tags the Droplet.
19 | Finally, it deletes the tag.
20 | """
21 | name = f"{defaults.PREFIX}-{uuid.uuid4()}"
22 | create_body = {"name": name}
23 |
24 | with shared.with_test_tag(integration_client, **create_body) as tag:
25 | assert tag["tag"]["name"] == name
26 |
27 | get_resp = integration_client.tags.get(tag_id=name)
28 | assert get_resp["tag"]["name"] == name
29 | assert get_resp["tag"]["resources"]["count"] == 0
30 |
31 | droplet_req = {
32 | "name": f"{defaults.PREFIX}-{uuid.uuid4()}",
33 | "region": defaults.REGION,
34 | "size": defaults.DROPLET_SIZE,
35 | "image": defaults.DROPLET_IMAGE,
36 | }
37 |
38 | with shared.with_test_droplet(
39 | integration_client, public_key, **droplet_req
40 | ) as droplet:
41 | shared.wait_for_action(
42 | integration_client, droplet["links"]["actions"][0]["id"]
43 | )
44 | droplet_id = droplet["droplet"]["id"]
45 | droplet_get_resp = integration_client.droplets.get(droplet_id)
46 | assert droplet_get_resp["droplet"]["status"] == "active"
47 |
48 | assign_req = {
49 | "resources": [
50 | {
51 | "resource_id": f"{droplet_id}",
52 | "resource_type": "droplet",
53 | },
54 | ]
55 | }
56 |
57 | integration_client.tags.assign_resources(
58 | tag_id=name,
59 | body=assign_req,
60 | )
61 |
62 | get_resp = integration_client.tags.get(tag_id=name)
63 | assert get_resp["tag"]["name"] == name
64 | assert (
65 | get_resp["tag"]["resources"]["last_tagged_uri"]
66 | == f"https://api.digitalocean.com/v2/droplets/{droplet_id}"
67 | )
68 | assert get_resp["tag"]["resources"]["count"] == 1
69 | assert get_resp["tag"]["resources"]["droplets"]["count"] == 1
70 |
--------------------------------------------------------------------------------
/tests/integration/test_vpcs.py:
--------------------------------------------------------------------------------
1 | """ test_vpcs.py
2 | Integration Tests for VPCs
3 | """
4 |
5 | import uuid
6 |
7 | from pydo import Client
8 | from tests.integration import defaults
9 | from tests.integration import shared
10 |
11 |
12 | def test_vpcs_create(integration_client: Client):
13 | """Testing create a new VPC"""
14 |
15 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
16 |
17 | create_req = {
18 | "name": expected_name,
19 | "description": "VPC for testing client gen",
20 | "region": "nyc1",
21 | }
22 |     vpc = None  # set before the try so the finally clause can't hit a NameError
23 | try:
24 | vpc = integration_client.vpcs.create(create_req)
25 | assert vpc["vpc"]["name"] == expected_name
26 | finally:
27 |         if vpc is not None:
28 |             integration_client.vpcs.delete(vpc["vpc"]["id"])
29 |
30 | def test_vpcs_list(integration_client: Client):
31 | """Testing listing all VPCs"""
32 |
33 | list_res = integration_client.vpcs.list()
34 |     assert len(list_res["vpcs"]) > 0
35 |
36 |
37 | def test_vpcs_get(integration_client: Client):
38 | """Testing GETting a VPC"""
39 |
40 | with shared.with_test_vpc(integration_client) as vpc:
41 | vpc_id = vpc["vpc"]["id"]
42 | get_res = integration_client.vpcs.get(vpc_id)
43 | assert get_res["vpc"]["id"] == vpc["vpc"]["id"]
44 |
45 |
46 | def test_vpcs_update(integration_client: Client):
47 | """Testing updating a VPC"""
48 |
49 | updated_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
50 |
51 | update_req = {
52 | "name": updated_name,
53 | }
54 |
55 | with shared.with_test_vpc(integration_client) as vpc:
56 | vpc_id = vpc["vpc"]["id"]
57 | update_res = integration_client.vpcs.update(vpc_id, body=update_req)
58 | assert update_res["vpc"]["name"] == updated_name
59 |
60 |
61 | def test_vpcs_patch(integration_client: Client):
62 | """Testing patching a VPC (partial update)"""
63 |
64 | updated_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
65 |
66 | update_req = {
67 | "name": updated_name,
68 | }
69 |
70 | with shared.with_test_vpc(integration_client) as vpc:
71 | vpc_id = vpc["vpc"]["id"]
72 | previous_description = vpc["vpc"]["description"]
73 | update_res = integration_client.vpcs.patch(vpc_id, body=update_req)
74 |         # ensure the name changed but the description didn't.
75 | assert (
76 | update_res["vpc"]["name"] == updated_name
77 | and previous_description == update_res["vpc"]["description"]
78 | )
79 |
80 |
81 | def test_vpcs_delete(integration_client: Client):
82 | """Testing delete a VPC"""
83 |
84 | with shared.with_test_vpc(integration_client) as vpc:
85 | vpc_id = vpc["vpc"]["id"]
86 | delete_res = integration_client.vpcs.delete(vpc_id)
87 | assert delete_res is None
88 |
89 |
90 | def test_vpcs_list_members(integration_client: Client, public_key: bytes):
91 | """Testing listing members of a VPC"""
92 |
93 | expected_name = f"{defaults.PREFIX}-{uuid.uuid4()}"
94 | with shared.with_test_vpc(integration_client) as vpc:
95 | vpc_id = vpc["vpc"]["id"]
96 | droplet_req = {
97 | "name": expected_name,
98 | "region": defaults.REGION,
99 | "size": defaults.DROPLET_SIZE,
100 | "image": defaults.DROPLET_IMAGE,
101 | "vpc_uuid": vpc_id,
102 | }
103 |
104 | with shared.with_test_droplet(
105 | integration_client, public_key, **droplet_req
106 | ) as droplet:
107 | shared.wait_for_action(
108 | integration_client, droplet["links"]["actions"][0]["id"]
109 | )
110 | list_res = integration_client.vpcs.list_members(vpc_id)
111 | members = list_res["members"]
112 | assert members[0]["name"] == expected_name
113 |         # the droplet must be fully deleted before the VPC can be deleted; poll until no members remain (a gentler variant is sketched after this file)
114 | list_res = len(integration_client.vpcs.list_members(vpc_id)["members"])
115 | while list_res != 0:
116 | list_res = len(integration_client.vpcs.list_members(vpc_id)["members"])
117 |
--------------------------------------------------------------------------------
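
Note: as the comment at the end of test_vpcs_list_members says, the Droplet must be fully deleted before the VPC can be, and the test spins in a tight loop until the member list empties. A gentler variant pauses between polls and gives up after a deadline; in this sketch the ten-second pause and ten-minute deadline are arbitrary assumptions.

```python
# Sketch: poll VPC membership with a pause and a deadline instead of a
# tight loop; vpc_id and integration_client are as in the test above.
import time

deadline = time.time() + 600  # allow up to ten minutes for cleanup
while integration_client.vpcs.list_members(vpc_id)["members"]:
    if time.time() > deadline:
        raise TimeoutError(f"VPC {vpc_id} still has members after 10 minutes")
    time.sleep(10)
```
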
/tests/mocked/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/digitalocean/pydo/54e85481359284d7ffdb0d771185f6d1d998dbc2/tests/mocked/__init__.py
--------------------------------------------------------------------------------
/tests/mocked/conftest.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=redefined-outer-name
2 | """Pytest configuration for mocked tests."""
3 | import pytest
4 |
5 | from pydo import Client
6 | from pydo.aio import Client as aioClient
7 |
8 |
9 | @pytest.fixture(scope="module")
10 | def mock_client_url():
11 | """Returns a url used as the API endpoint for the mock client."""
12 | return "https://testing.local"
13 |
14 |
15 | @pytest.fixture(scope="module")
16 | def mock_client(mock_client_url) -> Client:
17 | """Initializes a mock client
18 | The mock client doesn't use a valid token and has a fake API endpoint set.
19 | """
20 | return Client("", endpoint=mock_client_url)
21 |
22 |
23 | @pytest.fixture(scope="module")
24 | def mock_aio_client(mock_client_url) -> aioClient:
25 | """Initializes a mock aio client
26 | The mock client doesn't use a valid token and has a fake API endpoint set.
27 | """
28 | return aioClient("", endpoint=mock_client_url)
29 |
--------------------------------------------------------------------------------
/tests/mocked/test_account.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the account API resource."""
2 |
3 | import pytest
4 | import responses
5 | from aioresponses import aioresponses
6 |
7 | from pydo import Client
8 | from pydo.aio import Client as aioClient
9 |
10 | EXPECTED_ACCOUNT = {
11 | "account": {
12 | "droplet_limit": 25,
13 | "floating_ip_limit": 5,
14 | "email": "sammy@digitalocean.com",
15 | "uuid": "b6fr89dbf6d9156cace5f3c78dc9851d957381ef",
16 | "email_verified": True,
17 | "status": "active",
18 | "status_message": " ",
19 | "team": {
20 | "uuid": "5df3e3004a17e242b7c20ca6c9fc25b701a47ece",
21 | "name": "My Team",
22 | },
23 | }
24 | }
25 |
26 |
27 | @responses.activate
28 | def test_account_get(mock_client: Client, mock_client_url):
29 | """Mocks the account get operation."""
30 |
31 | responses.add(responses.GET, f"{mock_client_url}/v2/account", json=EXPECTED_ACCOUNT)
32 | acct = mock_client.account.get()
33 |
34 | assert acct == EXPECTED_ACCOUNT
35 |
36 |
37 | @pytest.mark.asyncio
38 | async def test_account_get_async(mock_aio_client: aioClient, mock_client_url):
39 | """Mocks the account get aio operation."""
40 |
41 | with aioresponses() as mock_resp:
42 | mock_resp.get(
43 | f"{mock_client_url}/v2/account", status=200, payload=EXPECTED_ACCOUNT
44 | )
45 | acct = await mock_aio_client.account.get()
46 |
47 | assert acct == EXPECTED_ACCOUNT
48 |
--------------------------------------------------------------------------------
/tests/mocked/test_actions.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the actions API resource"""
2 |
3 | import responses
4 |
5 | from pydo import Client
6 |
7 |
8 | @responses.activate
9 | def test_list(mock_client: Client, mock_client_url):
10 | """Mocks the actions list operation."""
11 | expected = {
12 | "actions": [
13 | {
14 | "id": 36804636,
15 | "status": "completed",
16 | "type": "create",
17 | "started_at": "2020-11-14T16:29:21Z",
18 | "completed_at": "2020-11-14T16:30:06Z",
19 | "resource_id": 3164444,
20 | "resource_type": "droplet",
21 | "region": {},
22 | "region_slug": "string",
23 | },
24 | ]
25 | }
26 |
27 | responses.add(responses.GET, f"{mock_client_url}/v2/actions", json=expected)
28 | list_resp = mock_client.actions.list()
29 |
30 | assert list_resp == expected
31 |
32 |
33 | @responses.activate
34 | def test_get(mock_client: Client, mock_client_url):
35 | """Mocks the actions get operation."""
36 |
37 | action_id = 36804636
38 | expected = {
39 | "actions": {
40 | "id": action_id,
41 | "status": "completed",
42 | "type": "create",
43 | "started_at": "2020-11-14T16:29:21Z",
44 | "completed_at": "2020-11-14T16:30:06Z",
45 | "resource_id": 3164444,
46 | "resource_type": "droplet",
47 | "region": {},
48 | "region_slug": "string",
49 | },
50 | "links": {
51 | "pages": {
52 | "first": "https://api.digitalocean.com/v2/account/keys?page=1",
53 | "prev": "https://api.digitalocean.com/v2/account/keys?page=2",
54 | }
55 | },
56 | "meta": {"total": 1},
57 | }
58 |
59 | responses.add(
60 | responses.GET, f"{mock_client_url}/v2/actions/{action_id}", json=expected
61 | )
62 | get_resp = mock_client.actions.get(action_id)
63 |
64 | assert get_resp == expected
65 |
--------------------------------------------------------------------------------
/tests/mocked/test_billing.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=line-too-long
2 |
3 | """Mock tests for the billing API resource."""
4 | import responses
5 | from responses import matchers
6 |
7 | from pydo import Client
8 |
9 |
10 | @responses.activate
11 | def test_get_customer_balance(mock_client: Client, mock_client_url):
12 | """Mocks billing's GET customer balance operation."""
13 | expected = {
14 | "month_to_date_balance": "0.00",
15 | "account_balance": "0.00",
16 | "month_to_date_usage": "0.00",
17 | "generated_at": "2019-07-09T15:01:12Z",
18 | }
19 | responses.add(
20 | responses.GET, f"{mock_client_url}/v2/customers/my/balance", json=expected
21 | )
22 | balance = mock_client.balance.get()
23 |
24 | assert balance == expected
25 |
26 |
27 | @responses.activate
28 | def test_list_billing_history(mock_client: Client, mock_client_url):
29 | """Mocks billing's GET billing history operation."""
30 | expected = {
31 | "billing_history": [
32 | {
33 | "description": "Invoice for May 2018",
34 | "amount": "12.34",
35 | "invoice_id": "123",
36 | "invoice_uuid": "example-uuid",
37 | "date": "2018-06-01T08:44:38Z",
38 | "type": "Invoice",
39 | },
40 | {
41 | "description": "Payment (MC 2018)",
42 | "amount": "-12.34",
43 | "date": "2018-06-02T08:44:38Z",
44 | "type": "Payment",
45 | },
46 | ],
47 | "links": {"pages": {}},
48 | "meta": {"total": 5},
49 | }
50 | responses.add(
51 | responses.GET,
52 | f"{mock_client_url}/v2/customers/my/billing_history",
53 | json=expected,
54 | )
55 |     history = mock_client.billing_history.list()
56 | 
57 |     assert history == expected
58 |
59 |
60 | @responses.activate
61 | def test_list_invoices(mock_client: Client, mock_client_url):
62 |     """Mocks billing's list invoices operation."""
63 | expected = {
64 | "invoices": [
65 | {
66 | "invoice_uuid": "22737513-0ea7-4206-8ceb-98a575af7681",
67 | "amount": "12.34",
68 | "invoice_period": "2019-12",
69 | },
70 | {
71 | "invoice_uuid": "fdabb512-6faf-443c-ba2e-665452332a9e",
72 | "amount": "23.45",
73 | "invoice_period": "2019-11",
74 | },
75 | ],
76 | "invoice_preview": {
77 | "invoice_uuid": "1afe95e6-0958-4eb0-8d9a-9c5060d3ef03",
78 | "amount": "34.56",
79 | "invoice_period": "2020-02",
80 | "updated_at": "2020-02-23T06:31:50Z",
81 | },
82 | "links": {"pages": {}},
83 | "meta": {"total": 70},
84 | }
85 | responses.add(
86 | responses.GET, f"{mock_client_url}/v2/customers/my/invoices", json=expected
87 | )
88 |     invoices = mock_client.invoices.list()
89 | 
90 |     assert invoices == expected
91 |
92 |
93 | @responses.activate
94 | def test_list_invoices_with_pagination(mock_client: Client, mock_client_url):
95 |     """Mocks billing's list invoices operation with pagination parameters."""
96 | expected = {
97 | "invoices": [
98 | {
99 | "invoice_uuid": "22737513-0ea7-4206-8ceb-98a575af7681",
100 | "amount": "12.34",
101 | "invoice_period": "2019-12",
102 | },
103 | {
104 | "invoice_uuid": "fdabb512-6faf-443c-ba2e-665452332a9e",
105 | "amount": "23.45",
106 | "invoice_period": "2019-11",
107 | },
108 | ],
109 | "invoice_preview": {
110 | "invoice_uuid": "1afe95e6-0958-4eb0-8d9a-9c5060d3ef03",
111 | "amount": "34.56",
112 | "invoice_period": "2020-02",
113 | "updated_at": "2020-02-23T06:31:50Z",
114 | },
115 | "links": {
116 | "pages": {
117 | "next": "https://api.digitalocean.com/v2/customers/my/invoices?page=2&per_page=20",
118 | "last": "https://api.digitalocean.com/v2/customers/my/invoices?page=6&per_page=20",
119 | }
120 | },
121 | "meta": {"total": 6},
122 | }
123 | params = {"per_page": 20, "page": 1}
124 | responses.add(
125 | responses.GET,
126 | f"{mock_client_url}/v2/customers/my/invoices",
127 | json=expected,
128 | match=[matchers.query_param_matcher(params)],
129 | )
130 |     invoices = mock_client.invoices.list(per_page=20, page=1)
131 | 
132 |     assert invoices == expected
133 |
134 |
135 | @responses.activate
136 | def test_get_invoice_by_uuid(mock_client: Client, mock_client_url):
137 | """Mocks billing's GET invoice by uuid."""
138 | expected = {
139 | "invoice_items": [
140 | {
141 | "product": "Kubernetes Clusters",
142 | "resource_uuid": "711157cb-37c8-4817-b371-44fa3504a39c",
143 | "group_description": "my-doks-cluster",
144 | "description": "a56e086a317d8410c8b4cfd1f4dc9f82",
145 | "amount": "12.34",
146 | "duration": "744",
147 | "duration_unit": "Hours",
148 | "start_time": "2020-01-01T00:00:00Z",
149 | "end_time": "2020-02-01T00:00:00Z",
150 | },
151 | {
152 | "product": "Spaces Subscription",
153 | "description": "Spaces ($5/mo 250GB storage & 1TB bandwidth)",
154 | "amount": "34.45",
155 | "duration": "744",
156 | "duration_unit": "Hours",
157 | "start_time": "2020-01-01T00:00:00Z",
158 | "end_time": "2020-02-01T00:00:00Z",
159 | },
160 | ],
161 | "links": {"pages": {}},
162 | "meta": {"total": 6},
163 | }
164 |
165 | responses.add(
166 | responses.GET, f"{mock_client_url}/v2/customers/my/invoices/1", json=expected
167 | )
168 |     invoice = mock_client.invoices.get_by_uuid(invoice_uuid=1)
169 | 
170 |     assert invoice == expected
171 |
172 |
173 | @responses.activate
174 | def test_get_invoice_csv_by_uuid(mock_client: Client, mock_client_url):
175 | """Mocks billing's GET invoice CSV by uuid."""
176 | expected = "product,group_description,description,hours,\
177 | start,end,USD,project_name,category\
178 | Floating IPs,,Unused Floating IP - 1.1.1.1,100,2020-07-01 00:00:00\
179 | +0000,2020-07-22 18:14:39 +0000,$3.11,,iaas\
180 | Taxes,,STATE SALES TAX (6.25%),,2020-07-01 00:00:00\
181 | +0000,2020-07-31 23:59:59 +0000,$0.16,,iaas"
182 |
183 | responses.add(
184 | responses.GET,
185 | f"{mock_client_url}/v2/customers/my/invoices/1/csv",
186 | json=expected,
187 | )
188 |     invoice_csv = mock_client.invoices.get_csv_by_uuid(invoice_uuid=1)
189 | 
190 |     assert invoice_csv == expected
191 |
192 |
193 | @responses.activate
194 | def test_get_invoice_pdf_by_uuid(mock_client: Client, mock_client_url):
195 | """Mocks billing's GET invoice PDF by uuid."""
196 | expected = "product,group_description,description,hours\
197 | ,start,end,USD,project_name,category\
198 | Floating IPs,,Unused Floating IP - 1.1.1.1,100,2020-07-01\
199 | 00:00:00 +0000,2020-07-22 18:14:39 +0000,$3.11,,iaas\
200 | Taxes,,STATE SALES TAX (6.25%),,2020-07-01 00:00:00 \
201 | +0000,2020-07-31 23:59:59 +0000,$0.16,,iaas"
202 |
203 | responses.add(
204 | responses.GET,
205 | f"{mock_client_url}/v2/customers/my/invoices/1/pdf",
206 | json=expected,
207 | )
208 |     pdf = mock_client.invoices.get_pdf_by_uuid(invoice_uuid=1)
209 |     content = list(pdf)
210 | 
211 |     assert "group_description" in str(content)
212 |
213 |
214 | @responses.activate
215 | def test_get_invoice_summary_by_uuid(mock_client: Client, mock_client_url):
216 | """Mocks billing's GET invoice summary by uuid."""
217 | expected = {
218 | "invoice_uuid": "1",
219 | "billing_period": "2020-01",
220 | "amount": "27.13",
221 | "user_name": "Sammy Shark",
222 | "user_billing_address": {
223 | "address_line1": "101 Shark Row",
224 | "city": "Atlantis",
225 | "region": "OC",
226 | "postal_code": "12345",
227 | "country_iso2_code": "US",
228 | "created_at": "2019-09-03T16:34:46.000+00:00",
229 | "updated_at": "2019-09-03T16:34:46.000+00:00",
230 | },
231 | "user_company": "DigitalOcean",
232 | "user_email": "sammy@digitalocean.com",
233 | "product_charges": {
234 | "name": "Product usage charges",
235 | "amount": "12.34",
236 | "items": [
237 | {"amount": "10.00", "name": "Spaces Subscription", "count": "1"},
238 | {"amount": "2.34", "name": "Database Clusters", "count": "1"},
239 | ],
240 | },
241 | "overages": {"name": "Overages", "amount": "3.45"},
242 | "taxes": {"name": "Taxes", "amount": "4.56"},
243 | "credits_and_adjustments": {"name": "Credits & adjustments", "amount": "6.78"},
244 | }
245 | responses.add(
246 | responses.GET,
247 | f"{mock_client_url}/v2/customers/my/invoices/1/summary",
248 | json=expected,
249 | )
250 | invoice = mock_client.invoices.get_summary_by_uuid(invoice_uuid="1")
251 |
252 | assert invoice == expected
253 |
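254 | # Illustrative sketch, not part of the original suite: pydo does not paginate
255 | # responses itself, so a caller reads the `links.pages.next` URL from a list
256 | # response and requests the next page explicitly. The kwargs below mirror the
257 | # pagination test above; the test name and empty page payload are hypothetical.
258 | @responses.activate
259 | def test_list_invoices_second_page(mock_client: Client, mock_client_url):
260 |     """Mocks requesting the second page of invoices."""
261 |     expected = {"invoices": [], "links": {"pages": {}}, "meta": {"total": 6}}
262 |     responses.add(
263 |         responses.GET,
264 |         f"{mock_client_url}/v2/customers/my/invoices",
265 |         json=expected,
266 |         match=[matchers.query_param_matcher({"per_page": 20, "page": 2})],
267 |     )
268 |     invoices = mock_client.invoices.list(per_page=20, page=2)
269 | 
270 |     assert invoices == expected
271 | 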
--------------------------------------------------------------------------------
/tests/mocked/test_cdn.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the CDN endpoints."""
2 |
3 | import responses
4 |
5 | from pydo import Client
6 |
7 |
8 | @responses.activate
9 | def test_create(mock_client: Client, mock_client_url):
10 | """Mock Create CDN"""
11 | expected = {
12 | "endpoint": {
13 | "id": "19f06b6a-3ace-4315-b086-499a0e521b76",
14 | "origin": "static-images.nyc3.digitaloceanspaces.com",
15 | "endpoint": "static-images.nyc3.cdn.digitaloceanspaces.com",
16 | "created_at": "2018-07-19T15:04:16Z",
17 | "ttl": 3600,
18 | }
19 | }
20 |
21 | responses.add(
22 | responses.POST, f"{mock_client_url}/v2/cdn/endpoints", json=expected, status=201
23 | )
24 |
25 | create_req = {"origin": "static-images.nyc3.digitaloceanspaces.com", "ttl": 3600}
26 | create_resp = mock_client.cdn.create_endpoint(create_req)
27 |
28 | assert create_resp == expected
29 |
30 |
31 | @responses.activate
32 | def test_list(mock_client: Client, mock_client_url):
33 | """Mock List CDN"""
34 |
35 | expected = {
36 | "endpoints": [
37 | {
38 | "id": "19f06b6a-3ace-4315-b086-499a0e521b76",
39 | "origin": "static-images.nyc3.digitaloceanspaces.com",
40 | "endpoint": "static-images.nyc3.cdn.digitaloceanspaces.com",
41 | "created_at": "2018-07-19T15:04:16Z",
42 | "certificate_id": "892071a0-bb95-49bc-8021-3afd67a210bf",
43 | "custom_domain": "static.example.com",
44 | "ttl": 3600,
45 | }
46 | ],
47 | "links": {},
48 | "meta": {"total": 1},
49 | }
50 |
51 | responses.add(
52 | responses.GET, f"{mock_client_url}/v2/cdn/endpoints", json=expected, status=200
53 | )
54 |
55 | list_resp = mock_client.cdn.list_endpoints()
56 |
57 | assert list_resp == expected
58 |
59 |
60 | @responses.activate
61 | def test_get(mock_client: Client, mock_client_url):
62 | """Mock Get CDN"""
63 |
64 | expected = {
65 | "endpoint": {
66 | "id": "1",
67 | "origin": "static-images.nyc3.digitaloceanspaces.com",
68 | "endpoint": "static-images.nyc3.cdn.digitaloceanspaces.com",
69 | "created_at": "2018-07-19T15:04:16Z",
70 | "ttl": 3600,
71 | }
72 | }
73 |
74 | responses.add(
75 | responses.GET,
76 | f"{mock_client_url}/v2/cdn/endpoints/1",
77 | json=expected,
78 | status=200,
79 | )
80 |
81 | get_resp = mock_client.cdn.get_endpoint("1")
82 |
83 | assert get_resp == expected
84 |
85 |
86 | @responses.activate
87 | def test_update(mock_client: Client, mock_client_url):
88 | """Mock Update CDN"""
89 |
90 | expected = {
91 | "endpoint": {
92 | "id": "1",
93 | "origin": "static-images.nyc3.digitaloceanspaces.com",
94 | "endpoint": "static-images.nyc3.cdn.digitaloceanspaces.com",
95 | "created_at": "2018-07-19T15:04:16Z",
96 | "ttl": 3600,
97 | }
98 | }
99 |
100 | responses.add(
101 | responses.PUT,
102 | f"{mock_client_url}/v2/cdn/endpoints/1",
103 | json=expected,
104 | status=200,
105 | )
106 |
107 | update_req = {
108 | "ttl": 3600,
109 | "certificate_id": "892071a0-bb95-49bc-8021-3afd67a210bf",
110 | "custom_domain": "static.example.com",
111 | }
112 |
113 | update_resp = mock_client.cdn.update_endpoints("1", update_req)
114 |
115 | assert update_resp == expected
116 |
117 |
118 | @responses.activate
119 | def test_delete(mock_client: Client, mock_client_url):
120 | """Mock Delete CDN"""
121 |
122 | responses.add(responses.DELETE, f"{mock_client_url}/v2/cdn/endpoints/1", status=204)
123 |
124 | delete_resp = mock_client.cdn.delete_endpoint("1")
125 |
126 | assert delete_resp is None
127 |
128 |
129 | @responses.activate
130 | def test_purge(mock_client: Client, mock_client_url):
131 | """Mock Purge CDN"""
132 |
133 | responses.add(
134 | responses.DELETE, f"{mock_client_url}/v2/cdn/endpoints/1/cache", status=204
135 | )
136 |
137 | purge_req = {"files": ["path/to/image.png", "path/to/css/*"]}
138 |
139 | purge_resp = mock_client.cdn.purge_cache("1", purge_req)
140 |
141 | assert purge_resp is None
142 |
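143 | # Illustrative sketch, not part of the original suite: the create test above
144 | # only checks the mocked response, so a stricter variant can also match the
145 | # outgoing JSON body via `responses.matchers`, the same helper used elsewhere
146 | # in these mocked tests. The test name and trimmed payload are hypothetical.
147 | @responses.activate
148 | def test_create_matches_request_body(mock_client: Client, mock_client_url):
149 |     """Mock Create CDN, asserting the request body"""
150 |     expected = {"endpoint": {"id": "1", "ttl": 3600}}
151 |     create_req = {"origin": "static-images.nyc3.digitaloceanspaces.com", "ttl": 3600}
152 | 
153 |     responses.add(
154 |         responses.POST,
155 |         f"{mock_client_url}/v2/cdn/endpoints",
156 |         json=expected,
157 |         status=201,
158 |         match=[responses.matchers.json_params_matcher(create_req)],
159 |     )
160 | 
161 |     create_resp = mock_client.cdn.create_endpoint(create_req)
162 | 
163 |     assert create_resp == expected
164 | 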
--------------------------------------------------------------------------------
/tests/mocked/test_client_customizations.py:
--------------------------------------------------------------------------------
1 | """Client customization tests
2 |
3 | These tests aren't essential but serve as good examples for using the client with
4 | custom configuration.
5 | """
6 |
7 | import logging
8 | import re
9 |
10 | import responses
11 | from pydo import Client
12 |
13 | # pylint: disable=missing-function-docstring
14 |
15 |
16 | def test_custom_headers():
17 | custom_headers = {"x-request-id": "fakeid"}
18 | client = Client("", headers=custom_headers)
19 |
20 | # pylint: disable=protected-access
21 | assert client._config.headers_policy.headers == custom_headers
22 |
23 |
24 | def test_custom_timeout():
25 | timeout = 300
26 | client = Client("", timeout=timeout)
27 |
28 | # pylint: disable=protected-access
29 | assert client._config.retry_policy.timeout == timeout
30 |
31 |
32 | def test_custom_endpoint():
33 | endpoint = "https://fake.local"
34 | client = Client("", endpoint=endpoint)
35 |
36 | # pylint: disable=protected-access
37 | assert client._client._base_url == endpoint
38 |
39 |
40 | def test_custom_logger():
41 | name = "mockedtests"
42 | logger = logging.getLogger(name)
43 | client = Client("", logger=logger)
44 |
45 | # pylint: disable=protected-access
46 | assert client._config.http_logging_policy.logger.name == name
47 |
48 |
49 | @responses.activate
50 | def test_custom_user_agent():
51 | user_agent = "test"
52 | fake_endpoint = "https://fake.local"
53 | client = Client(
54 | "",
55 | endpoint=fake_endpoint,
56 | user_agent=user_agent,
57 | user_agent_overwrite=True,
58 | )
59 |
60 | full_user_agent_pattern = r"^test azsdk-python-pydo\/.+Python\/.+\(.+\)$"
61 |
62 | # pylint: disable=protected-access
63 | got_user_agent = client._config.user_agent_policy.user_agent
64 | match = re.match(full_user_agent_pattern, got_user_agent)
65 | assert match is not None
66 |
67 | fake_url = f"{fake_endpoint}/v2/account"
68 | responses.add(
69 | responses.GET,
70 | fake_url,
71 | match=[responses.matchers.header_matcher({"User-Agent": user_agent})],
72 | )
73 |
74 | client.account.get(user_agent=user_agent)
75 | assert responses.assert_call_count(fake_url, count=1)
76 |
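77 | # Illustrative sketch, not part of the original suite: the customizations
78 | # tested above are independent keyword arguments, so they can be combined on a
79 | # single Client. The asserted attribute paths are the same ones used in the
80 | # tests above; the test name is hypothetical.
81 | def test_combined_customizations():
82 |     client = Client(
83 |         "",
84 |         endpoint="https://fake.local",
85 |         timeout=120,
86 |         headers={"x-request-id": "fakeid"},
87 |     )
88 | 
89 |     # pylint: disable=protected-access
90 |     assert client._client._base_url == "https://fake.local"
91 |     assert client._config.retry_policy.timeout == 120
92 |     assert client._config.headers_policy.headers == {"x-request-id": "fakeid"}
93 | 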
--------------------------------------------------------------------------------
/tests/mocked/test_image_actions.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=line-too-long
2 | # pylint: disable=duplicate-code
3 | """Mock tests for the Image Actions API resource"""
4 |
5 | import responses
6 |
7 | from pydo import Client
8 |
9 |
10 | @responses.activate
11 | def test_image_actions_get(mock_client: Client, mock_client_url):
12 | """Tests Retrieving an Existing Image Action"""
13 | image_id = 7938269
14 | action_id = 36805527
15 | expected = {
16 | "action": {
17 | "id": action_id,
18 | "status": "in-progress",
19 | "type": "transfer",
20 | "started_at": "2014-11-14T16:42:45Z",
21 | "completed_at": None,
22 | "resource_id": image_id,
23 | "resource_type": "image",
24 | "region": {
25 | "name": "New York 3",
26 | "slug": "nyc3",
27 | "sizes": ["s-1vcpu-3gb", "m-1vcpu-8gb"],
28 | "features": ["private_networking", "image_transfer"],
29 | "available": True,
30 | },
31 | "region_slug": "nyc3",
32 | }
33 | }
34 |
35 | responses.add(
36 | responses.GET,
37 | f"{mock_client_url}/v2/images/{image_id}/actions/{action_id}",
38 | json=expected,
39 | )
40 |
41 | get_resp = mock_client.image_actions.get(image_id=image_id, action_id=action_id)
42 |
43 | assert get_resp == expected
44 |
45 |
46 | @responses.activate
47 | def test_image_actions_list(mock_client: Client, mock_client_url):
48 | """Mocks the image actions list operation"""
49 | image_id = 7555620
50 | expected = {
51 | "actions": [
52 | {
53 | "id": 29410565,
54 | "status": "completed",
55 | "type": "transfer",
56 | "started_at": "2014-07-25T15:04:21Z",
57 | "completed_at": "2014-07-25T15:10:20Z",
58 | "resource_id": image_id,
59 | "resource_type": "image",
60 | "region": {
61 | "name": "New York 2",
62 | "slug": "nyc2",
63 | "sizes": ["s-1vcpu-3gb", "s-24vcpu-128gb"],
64 | "features": ["private_networking", "image_transfer"],
65 | "available": True,
66 | },
67 | "region_slug": "nyc2",
68 | }
69 | ],
70 | "links": {
71 | "pages": {
 72 |                 "last": f"https://api.digitalocean.com/v2/images/{image_id}/actions?page=5&per_page=1",
 73 |                 "next": f"https://api.digitalocean.com/v2/images/{image_id}/actions?page=2&per_page=1",
74 | }
75 | },
76 | "meta": {"total": 5},
77 | }
78 |
79 | responses.add(
80 | responses.GET, f"{mock_client_url}/v2/images/{image_id}/actions", json=expected
81 | )
82 | list_resp = mock_client.image_actions.list(image_id=image_id)
83 |
84 | assert list_resp == expected
85 |
86 |
87 | @responses.activate
88 | def test_image_actions_post(mock_client: Client, mock_client_url):
89 | """Mocks the image actions post operation."""
90 | image_id = 7938269
91 | expected = {
92 | "action": {
93 | "id": 36805527,
94 | "status": "in-progress",
95 | "type": "transfer",
96 | "started_at": "2014-11-14T16:42:45Z",
97 | "completed_at": None,
98 | "resource_id": image_id,
99 | "resource_type": "image",
100 | "region": {
101 | "name": "New York 3",
102 | "slug": "nyc3",
103 | "sizes": ["s-1vcpu-3gb", "s-24vcpu-128gb"],
104 | "features": ["private_networking", "image_transfer"],
105 | "available": True,
106 | },
107 | "region_slug": "nyc3",
108 | }
109 | }
110 |
111 | responses.add(
112 | responses.POST,
113 | f"{mock_client_url}/v2/images/{image_id}/actions",
114 | json=expected,
115 | status=201,
116 | )
117 |
118 |     create_req = {"type": "transfer", "region": "nyc3"}
119 | post_resp = mock_client.image_actions.post(image_id=image_id, body=create_req)
120 |
121 | assert post_resp == expected
122 |
--------------------------------------------------------------------------------
/tests/mocked/test_images.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=line-too-long
2 | # pylint: disable=duplicate-code
3 | """Mock tests for the Images API resource"""
4 |
5 | import responses
6 |
7 | from pydo import Client
8 |
9 |
10 | @responses.activate
11 | def test_images_list(mock_client: Client, mock_client_url):
12 | """Mocks the images list operation"""
13 | expected = {
14 | "images": [
15 | {
16 | "id": 7555620,
17 | "name": "Nifty New Snapshot",
18 | "distribution": "Ubuntu",
19 | "slug": None,
20 | "public": False,
21 | "regions": ["nyc2", "nyc3"],
22 | "created_at": "2014-11-04T22:23:02Z",
23 | "type": "snapshot",
24 | "min_disk_size": 20,
25 | "size_gigabytes": 2.34,
26 | "description": "",
27 | "tags": [],
28 | "status": "available",
29 | "error_message": "",
30 | },
31 | {
32 | "id": 7555621,
33 | "name": "Another Snapshot",
34 | "distribution": "Ubuntu",
35 | "slug": None,
36 | "public": False,
37 | "regions": ["nyc2"],
38 | "created_at": "2014-11-04T22:23:02Z",
39 | "type": "snapshot",
40 | "min_disk_size": 20,
41 | "size_gigabytes": 2.34,
42 | "description": "",
43 | "tags": [],
44 | "status": "available",
45 | "error_message": "",
46 | },
47 | {
48 | "id": 63663980,
49 | "name": "20.04 (LTS) x64",
50 | "distribution": "Ubuntu",
51 | "slug": "ubuntu-20-04-x64",
52 | "public": True,
53 | "regions": ["nyc2", "nyc3"],
54 | "created_at": "2020-05-15T05:47:50Z",
55 | "type": "snapshot",
56 | "min_disk_size": 20,
57 | "size_gigabytes": 2.36,
58 | "description": "",
59 | "tags": [],
60 | "status": "available",
61 | "error_message": "",
62 | },
63 | {
64 | "id": 7555621,
65 | "name": "A custom image",
66 | "distribution": "Arch Linux",
67 | "slug": None,
68 | "public": False,
69 | "regions": ["nyc3"],
70 | "created_at": "2014-11-04T22:23:02Z",
71 | "type": "custom",
72 | "min_disk_size": 20,
73 | "size_gigabytes": 2.34,
74 | "description": "",
75 | "tags": [],
76 | "status": "available",
77 | "error_message": "",
78 | },
79 | {
80 | "id": 7555621,
81 | "name": "An APP image",
82 | "distribution": "Fedora",
83 | "slug": None,
84 | "public": False,
85 | "regions": ["nyc2", "nyc3"],
86 | "created_at": "2014-11-04T22:23:02Z",
87 | "type": "snapshot",
88 | "min_disk_size": 20,
89 | "size_gigabytes": 2.34,
90 | "description": "",
91 | "tags": [],
92 | "status": "available",
93 | "error_message": "",
94 | },
95 | {
96 | "id": 7555621,
97 | "name": "A simple tagged image",
98 | "distribution": "CentOS",
99 | "slug": None,
100 | "public": False,
101 | "regions": ["nyc2", "nyc3"],
102 | "created_at": "2014-11-04T22:23:02Z",
103 | "type": "snapshot",
104 | "min_disk_size": 20,
105 | "size_gigabytes": 2.34,
106 | "description": "",
107 | "tags": ["simple-image"],
108 | "status": "available",
109 | "error_message": "",
110 | },
111 | ],
112 | "links": {"pages": {}},
113 | "meta": {"total": 6},
114 | }
115 |
116 | responses.add(responses.GET, f"{mock_client_url}/v2/images", json=expected)
117 | list_resp = mock_client.images.list()
118 |
119 | assert list_resp == expected
120 |
121 |
122 | @responses.activate
123 | def test_images_get(mock_client: Client, mock_client_url):
124 | """Tests Retrieving an Existing Image"""
125 | expected = {
126 | "image": {
127 | "id": 6918990,
128 | "name": "14.04 x64",
129 | "distribution": "Ubuntu",
130 | "slug": "ubuntu-16-04-x64",
131 | "public": True,
132 | "regions": [
133 | "nyc1",
134 | "ams1",
135 | "sfo1",
136 | "nyc2",
137 | "ams2",
138 | "sgp1",
139 | "lon1",
140 | "nyc3",
141 | "ams3",
142 | "nyc3",
143 | ],
144 | "created_at": "2014-10-17T20:24:33Z",
145 | "min_disk_size": 20,
146 | "size_gigabytes": 2.34,
147 | "description": "",
148 | "tags": [],
149 | "status": "available",
150 | "error_message": "",
151 | }
152 | }
153 |
154 | responses.add(responses.GET, f"{mock_client_url}/v2/images/6918990", json=expected)
155 |
156 | get_resp = mock_client.images.get(image_id="6918990")
157 |
158 | assert get_resp == expected
159 |
160 |
161 | @responses.activate
162 | def test_images_delete(mock_client: Client, mock_client_url):
163 | """Test Images Delete"""
164 |
165 | responses.add(
166 | responses.DELETE,
167 | f"{mock_client_url}/v2/images/6372321",
168 | status=204,
169 | )
170 | del_resp = mock_client.images.delete(image_id="6372321")
171 |
172 | assert del_resp is None
173 |
174 |
175 | @responses.activate
176 | def test_images_update(mock_client: Client, mock_client_url):
177 | """Mocks the images update operation."""
178 | expected = {
179 | "image": {
180 | "id": 7938391,
181 | "name": "new-image-name",
182 | "distribution": "Ubuntu",
183 | "slug": None,
184 | "public": False,
185 | "regions": ["nyc3", "nyc3"],
186 | "created_at": "2014-11-14T16:44:03Z",
187 | "min_disk_size": 20,
188 | "size_gigabytes": 2.34,
189 | "description": "",
190 | "tags": [],
191 | "status": "available",
192 | "error_message": "",
193 | }
194 | }
195 | responses.add(
196 | responses.PUT,
197 | f"{mock_client_url}/v2/images/7938391",
198 | json=expected,
199 | status=200,
200 | )
201 |
202 |     update = {
203 |         "name": "new-image-name",
204 |         "distribution": "Ubuntu",
205 |         "description": "",
206 |     }
207 |     update_resp = mock_client.images.update(body=update, image_id="7938391")
208 | 
209 |     assert update_resp == expected
210 |
211 |
212 | @responses.activate
213 | def test_images_create(mock_client: Client, mock_client_url):
214 | """Mocks the images create operation."""
215 | expected = {
216 | "image": {
217 | "created_at": "2018-09-20T19:28:00Z",
218 | "description": "Cloud-optimized image w/ small footprint",
219 | "distribution": "Ubuntu",
220 | "error_message": "",
221 | "id": 38413969,
222 | "name": "ubuntu-18.04-minimal",
223 | "regions": [],
224 | "type": "custom",
225 | "tags": ["base-image", "prod"],
226 | "status": "NEW",
227 | }
228 | }
229 |
230 | responses.add(
231 | responses.POST,
232 | f"{mock_client_url}/v2/images",
233 | json=expected,
234 | status=202,
235 | )
236 |
237 | create_req = {
238 | "name": "ubuntu-18.04-minimal",
239 | "url": "http://cloud-images.ubuntu.com/minimal/releases/bionic/release/ubuntu-18.04-minimal-cloudimg-amd64.img",
240 | "distribution": "Ubuntu",
241 | "region": "nyc3",
242 | "description": "Cloud-optimized image w/ small footprint",
243 | "tags": ["base-image", "prod"],
244 | }
245 |     create_resp = mock_client.images.create_custom(body=create_req)
246 | 
247 |     assert create_resp == expected
248 |
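249 | # Illustrative sketch, not part of the original suite: the images list endpoint
250 | # documents `type` and `private` query filters; the kwarg name below assumes
251 | # pydo passes it straight through as a query parameter. `strict_match=False`
252 | # tolerates any default pagination params the client may also send.
253 | @responses.activate
254 | def test_images_list_filtered(mock_client: Client, mock_client_url):
255 |     """Mocks the images list operation with a query filter."""
256 |     expected = {"images": [], "links": {"pages": {}}, "meta": {"total": 0}}
257 | 
258 |     responses.add(
259 |         responses.GET,
260 |         f"{mock_client_url}/v2/images",
261 |         json=expected,
262 |         match=[
263 |             responses.matchers.query_param_matcher(
264 |                 {"type": "distribution"}, strict_match=False
265 |             )
266 |         ],
267 |     )
268 |     list_resp = mock_client.images.list(type="distribution")
269 | 
270 |     assert list_resp == expected
271 | 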
--------------------------------------------------------------------------------
/tests/mocked/test_one_click.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the 1-click API resource."""
2 |
3 | import responses
4 | from responses import matchers
5 |
6 | from pydo import Client
7 |
8 |
9 | @responses.activate
10 | def test_one_click_list_applications(mock_client: Client, mock_client_url):
11 | """Mocks the 1-clicks list applications operation."""
12 | expected = {
13 | "1_clicks": [
14 | {"slug": "monitoring", "type": "kubernetes"},
15 | {"slug": "wordpress-18-04", "type": "droplet"},
16 | ]
17 | }
18 | responses.add(responses.GET, f"{mock_client_url}/v2/1-clicks", json=expected)
19 |
20 | one_click_apps = mock_client.one_clicks.list()
21 | assert one_click_apps == expected
22 |
23 |
24 | @responses.activate
25 | def test_one_click_list_applications_with_query(mock_client: Client, mock_client_url):
 26 |     """Mocks listing 1-click applications with a query."""
27 |
28 | expected = {
29 | "1_clicks": [
30 | {"slug": "wordpress-18-04", "type": "droplet"},
31 | ]
32 | }
33 |
34 | responses.add(
35 | responses.GET,
36 | f"{mock_client_url}/v2/1-clicks",
37 | json=expected,
38 | match=[matchers.query_param_matcher({"type": "droplet"})],
39 | )
40 |
41 | one_click_apps = mock_client.one_clicks.list(type="droplet")
42 | assert one_click_apps == expected
43 |
44 |
45 | @responses.activate
46 | def test_one_click_install_kubernetes(mock_client: Client, mock_client_url):
 47 |     """Mocks installing a kubernetes 1-click application."""
48 |
49 | expected = {"message": "Successfully kicked off addon job."}
50 |
51 | responses.add(
52 | responses.POST,
53 | f"{mock_client_url}/v2/1-clicks/kubernetes",
54 | json=expected,
55 | )
56 |
 57 |     install_req = {
 58 |         "addon_slugs": ["kube-state-metrics", "loki"],
 59 |         "cluster_uuid": "50a994b6-c303-438f-9495-7e896cfe6b08",
 60 |     }
 61 |     install_resp = mock_client.one_clicks.install_kubernetes(install_req)
 62 |     assert install_resp == expected
 63 | 
--------------------------------------------------------------------------------
/tests/mocked/test_regions.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 | """Mock tests for the Regions API resource"""
3 |
4 | import responses
5 |
6 | from pydo import Client
7 |
8 |
9 | @responses.activate
10 | def test_list_regions(mock_client: Client, mock_client_url):
11 | """Mocks the regions list operation"""
12 | expected = {
13 | "regions": [
14 | {
15 | "name": "New York 3",
16 | "slug": "nyc3",
17 | "features": [
18 | "private_networking",
19 | "backups",
20 | "ipv6",
21 | "metadata",
22 | "install_agent",
23 | "storage",
24 | "image_transfer",
25 | ],
26 | "available": True,
27 | "sizes": [
28 | "s-1vcpu-1gb",
29 | "s-1vcpu-2gb",
30 | "s-1vcpu-3gb",
31 | "s-2vcpu-2gb",
32 | "s-3vcpu-1gb",
33 | "s-2vcpu-4gb",
34 | "s-4vcpu-8gb",
35 | "s-6vcpu-16gb",
36 | "s-8vcpu-32gb",
37 | "s-12vcpu-48gb",
38 | "s-16vcpu-64gb",
39 | "s-20vcpu-96gb",
40 | "s-24vcpu-128gb",
 41 |                 "s-32vcpu-192gb",
42 | ],
43 | }
44 | ],
45 | "links": {
46 | "pages": {
47 | "last": "https://api.digitalocean.com/v2/regions?page=13&per_page=1",
48 | "next": "https://api.digitalocean.com/v2/regions?page=2&per_page=1",
49 | }
50 | },
51 | "meta": {"total": 1},
52 | }
53 |
54 | responses.add(responses.GET, f"{mock_client_url}/v2/regions", json=expected)
55 | list_resp = mock_client.regions.list()
56 |
57 | assert list_resp == expected
58 |
--------------------------------------------------------------------------------
/tests/mocked/test_sizes.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 | """Mock tests for the Sizes API resource"""
3 |
4 | import responses
5 |
6 | from pydo import Client
7 |
8 |
9 | @responses.activate
10 | def test_list_sizes(mock_client: Client, mock_client_url):
11 | """Mock the sizes list operation"""
12 | expected = {
13 | "sizes": [
14 | {
15 | "slug": "s-1vcpu-1gb",
16 | "memory": 1024,
17 | "vcpus": 1,
18 | "disk": 25,
19 | "transfer": 1,
20 | "price_monthly": 5,
21 | "price_hourly": 0.00743999984115362,
22 | "regions": [
23 | "ams2",
24 | "ams3",
25 | "blr1",
26 | "fra1",
27 | "lon1",
28 | "nyc1",
29 | "nyc2",
30 | "nyc3",
31 | "sfo1",
32 | "sfo2",
33 | "sfo3",
34 | "sgp1",
35 | "tor1",
36 | ],
37 | "available": True,
38 | "description": "Basic",
39 | }
40 | ],
41 | "links": {
42 | "pages": {
43 | "last": "https://api.digitalocean.com/v2/sizes?page=64&per_page=1",
44 | "next": "https://api.digitalocean.com/v2/sizes?page=2&per_page=1",
45 | }
46 | },
47 | "meta": {"total": 64},
48 | }
49 | responses.add(responses.GET, f"{mock_client_url}/v2/sizes", json=expected)
50 | list_resp = mock_client.sizes.list()
51 |
52 | assert list_resp == expected
53 |
--------------------------------------------------------------------------------
/tests/mocked/test_snapshots.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the Snapshots API resource"""
2 |
3 | import responses
4 |
5 | from pydo import Client
6 |
7 |
8 | @responses.activate
9 | def test_snapshots_list(mock_client: Client, mock_client_url):
10 | """Mocks the snapshots list operation"""
11 | expected = {
12 | "snapshots": [
13 | {
14 | "id": "6372321",
15 | "name": "web-01-1595954862243",
16 | "created_at": "2020-07-28T16:47:44Z",
17 | "regions": ["nyc3", "sfo3"],
18 | "resource_id": "200776916",
19 | "resource_type": "droplet",
20 | "min_disk_size": 25,
21 | "size_gigabytes": 2.34,
22 | "tags": ["web", "env:prod"],
23 | },
24 | {
25 | "id": "fbe805e8-866b-11e6-96bf-000f53315a41",
26 | "name": "pvc-01-1595954862243",
27 | "created_at": "2019-09-28T23:14:30Z",
28 | "regions": ["nyc1"],
29 | "resource_id": "89bcc42f-85cf-11e6-a004-000f53315871",
30 | "resource_type": "volume",
31 | "min_disk_size": 2,
32 | "size_gigabytes": 0.1008,
33 | "tags": ["k8s"],
34 | },
35 | ],
36 | "links": {},
37 | "meta": {"total": 2},
38 | }
39 |
40 | responses.add(responses.GET, f"{mock_client_url}/v2/snapshots", json=expected)
41 | list_resp = mock_client.snapshots.list()
42 |
43 | assert list_resp == expected
44 |
45 |
46 | @responses.activate
47 | def test_snapshots_get(mock_client: Client, mock_client_url):
48 | """Tests Retrieving an Existing Snapshot"""
49 | expected = {
50 | "snapshot": {
51 | "id": "6372321",
52 | "name": "web-01-1595954862243",
53 | "created_at": "2020-07-28T16:47:44Z",
54 | "regions": ["nyc3", "sfo3"],
55 | "min_disk_size": 25,
56 | "size_gigabytes": 2.34,
57 | "resource_id": "200776916",
58 | "resource_type": "droplet",
59 | "tags": ["web", "env:prod"],
60 | }
61 | }
62 |
63 | responses.add(
64 | responses.GET, f"{mock_client_url}/v2/snapshots/6372321", json=expected
65 | )
66 |
67 | get_resp = mock_client.snapshots.get(snapshot_id="6372321")
68 |
69 | assert get_resp == expected
70 |
71 |
72 | @responses.activate
73 | def test_snapshots_delete(mock_client: Client, mock_client_url):
74 | """Test Snapshots Delete"""
75 |
76 | responses.add(
77 | responses.DELETE,
78 | f"{mock_client_url}/v2/snapshots/6372321",
79 | status=204,
80 | )
81 | del_resp = mock_client.snapshots.delete(snapshot_id="6372321")
82 |
83 | assert del_resp is None
84 |
--------------------------------------------------------------------------------
/tests/mocked/test_ssh_keys.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the keys API resource."""
2 |
3 | import responses
4 | from responses import matchers
5 |
6 | from pydo import Client
7 |
8 |
9 | @responses.activate
10 | def test_list_ssh_keys(mock_client: Client, mock_client_url):
11 | """Tests the SSH keys list operation."""
12 | expected = {
13 | "ssh_keys": [
14 | {
15 | "id": 1234,
16 | "public_key": "ssh-rsa aaaBBBccc123 key",
17 | "name": "key",
18 | "fingerprint": "17:23:a1:4f:55:4b:59:c6:ad:f7:69:dc:4e:85:e4:8a",
19 | },
20 | {
21 | "id": 5678,
22 | "public_key": "ssh-rsa longKeyString test",
23 | "name": "test",
24 | "fingerprint": "0a:56:d2:46:64:64:12:95:34:ce:e7:vf:0f:c8:5a:d3",
25 | },
26 | ],
27 | "links": {"pages": {}},
28 | "meta": {"total": 2},
29 | }
30 |
31 | responses.add(responses.GET, f"{mock_client_url}/v2/account/keys", json=expected)
32 | keys = mock_client.ssh_keys.list()
33 |
34 | assert keys == expected
35 |
36 |
37 | @responses.activate
38 | def test_list_ssh_keys_pagination(mock_client: Client, mock_client_url):
 39 |     """Tests the SSH keys list operation with pagination."""
40 | expected = {
41 | "ssh_keys": [
42 | {
43 | "id": 1234,
44 | "public_key": "ssh-rsa aaaBBBccc123 key",
45 | "name": "key",
46 | "fingerprint": "17:23:a1:4f:55:4b:59:c6:ad:f7:69:dc:4e:85:e4:8a",
47 | },
48 | {
49 | "id": 5678,
50 | "public_key": "ssh-rsa longKeyString test",
51 | "name": "test",
52 | "fingerprint": "0a:56:d2:46:64:64:12:95:34:ce:e7:vf:0f:c8:5a:d3",
53 | },
54 | ],
55 | "links": {
56 | "pages": {
57 | "next": "https://api.digitalocean.com/v2/account/keys?page=2",
58 | "last": "https://api.digitalocean.com/v2/account/keys?page=3",
59 | }
60 | },
61 | "meta": {"total": 6},
62 | }
63 |
64 | params = {"per_page": 2, "page": 2}
65 | responses.add(
66 | responses.GET,
67 | f"{mock_client_url}/v2/account/keys",
68 | json=expected,
69 | match=[matchers.query_param_matcher(params)],
70 | )
71 | keys = mock_client.ssh_keys.list(per_page=2, page=2)
72 |
73 | assert keys == expected
74 |
75 |
76 | @responses.activate
77 | def test_get_ssh_keys(mock_client: Client, mock_client_url):
78 | """Tests the SSH keys get operation."""
79 | expected = {
80 | "ssh_key": {
81 | "id": 1234,
82 | "public_key": "ssh-rsa aaaBBBccc123 key",
83 | "name": "key",
84 | "fingerprint": "17:23:a1:4f:55:4b:59:c6:ad:f7:69:dc:4e:85:e4:8a",
85 | }
86 | }
87 |
88 | responses.add(
89 | responses.GET,
90 | f"{mock_client_url}/v2/account/keys/1234",
91 | json=expected,
92 | )
93 | keys = mock_client.ssh_keys.get(ssh_key_identifier=1234)
94 |
95 | assert keys == expected
96 |
97 |
98 | @responses.activate
99 | def test_create_ssh_keys(mock_client: Client, mock_client_url):
100 | """Tests the SSH keys create operation."""
101 | expected = {
102 | "ssh_key": {
103 | "id": 1234,
104 | "public_key": "ssh-rsa aaaBBBccc123 key",
105 | "name": "key",
106 | "fingerprint": "17:23:a1:4f:55:4b:59:c6:ad:f7:69:dc:4e:85:e4:8a",
107 | }
108 | }
109 |
110 | responses.add(
111 | responses.POST, f"{mock_client_url}/v2/account/keys", json=expected, status=201
112 | )
113 | keys = mock_client.ssh_keys.create(
114 | {"name": "key", "public_key": "ssh-rsa aaaBBBccc123 key"}
115 | )
116 |
117 | assert keys == expected
118 |
119 |
120 | @responses.activate
121 | def test_update_ssh_keys(mock_client: Client, mock_client_url):
122 |     """Tests the SSH keys update operation."""
123 | expected = {
124 | "ssh_key": {
125 | "id": 1234,
126 | "public_key": "ssh-rsa aaaBBBccc123 key",
127 | "name": "new-name",
128 | "fingerprint": "17:23:a1:4f:55:4b:59:c6:ad:f7:69:dc:4e:85:e4:8a",
129 | }
130 | }
131 |
132 | responses.add(
133 | responses.PUT, f"{mock_client_url}/v2/account/keys/1234", json=expected
134 | )
135 | keys = mock_client.ssh_keys.update(
136 | ssh_key_identifier=1234, body={"name": "new-name"}
137 | )
138 |
139 | assert keys == expected
140 |
141 |
142 | @responses.activate
143 | def test_delete_ssh_keys(mock_client: Client, mock_client_url):
144 | """Tests the SSH keys delete operation."""
145 | responses.add(
146 | responses.DELETE,
147 | f"{mock_client_url}/v2/account/keys/1234",
148 | status=204,
149 | )
150 |
151 | mock_client.ssh_keys.delete(ssh_key_identifier=1234)
152 |
153 |
154 | @responses.activate
155 | def test_ssh_keys_error_response(mock_client: Client, mock_client_url):
156 | """Tests the SSH keys error response."""
157 | expected = {
158 | "id": "not_found",
159 | "message": "The resource you requested could not be found.",
160 | }
161 |
162 | responses.add(
163 | responses.DELETE,
164 | f"{mock_client_url}/v2/account/keys/1234",
165 | json=expected,
166 | status=404,
167 | )
168 |
169 | error = mock_client.ssh_keys.delete(ssh_key_identifier=1234)
170 |
171 | assert error == expected
172 |
--------------------------------------------------------------------------------
/tests/mocked/test_tags.py:
--------------------------------------------------------------------------------
1 | """Mock tests for the tags API resource."""
2 |
3 | import responses
4 | from responses import matchers
5 |
6 | from pydo import Client
7 |
8 |
9 | @responses.activate
10 | def test_list_tags(mock_client: Client, mock_client_url):
11 | """Mocks the tags list operation."""
12 | expected = {
13 | "tags": [
14 | {
15 | "name": "tag-with-resources",
16 | "resources": {
17 | "count": 3,
18 | "last_tagged_uri": "https://api.digitalocean.com/v2/droplets/123",
19 | "droplets": {
20 | "count": 2,
21 | "last_tagged_uri": "https://api.digitalocean.com/v2/droplets/123", # pylint: disable=line-too-long
22 | },
23 | "images": {
24 | "count": 1,
25 | "last_tagged_uri": "https://api.digitalocean.com/v2/images/1234", # pylint: disable=line-too-long
26 | },
27 | "volumes": {"count": 0},
28 | "volume_snapshots": {"count": 0},
29 | "databases": {"count": 0},
30 | },
31 | },
32 | {
33 | "name": "tag-with-no-resources",
34 | "resources": {
35 | "count": 0,
36 | "droplets": {"count": 0},
37 | "images": {"count": 0},
38 | "volumes": {"count": 0},
39 | "volume_snapshots": {"count": 0},
40 | "databases": {"count": 0},
41 | },
42 | },
43 | ],
44 | "links": {},
45 | "meta": {"total": 2},
46 | }
47 |
48 | responses.add(responses.GET, f"{mock_client_url}/v2/tags", json=expected)
49 | tags = mock_client.tags.list()
50 |
51 | assert tags == expected
52 |
53 |
54 | @responses.activate
55 | def test_list_tags_pagination(mock_client: Client, mock_client_url):
 56 |     """Mocks the tags list operation with pagination."""
57 | expected = {
58 | "tags": [
59 | {
60 | "name": "tag-with-resources",
61 | "resources": {
62 | "count": 3,
63 | "last_tagged_uri": "https://api.digitalocean.com/v2/droplets/123",
64 | "droplets": {
65 | "count": 2,
66 | "last_tagged_uri": "https://api.digitalocean.com/v2/droplets/123", # pylint: disable=line-too-long
67 | },
68 | "images": {
69 | "count": 1,
70 | "last_tagged_uri": "https://api.digitalocean.com/v2/images/1234", # pylint: disable=line-too-long
71 | },
72 | "volumes": {"count": 0},
73 | "volume_snapshots": {"count": 0},
74 | "databases": {"count": 0},
75 | },
76 | },
77 | {
78 | "name": "tag-with-no-resources",
79 | "resources": {
80 | "count": 0,
81 | "droplets": {"count": 0},
82 | "images": {"count": 0},
83 | "volumes": {"count": 0},
84 | "volume_snapshots": {"count": 0},
85 | "databases": {"count": 0},
86 | },
87 | },
88 | ],
89 | "links": {
90 | "pages": {
91 | "next": "https://api.digitalocean.com/v2/tags?page=2",
92 | "last": "https://api.digitalocean.com/v2/tags?page=3",
93 | }
94 | },
95 | "meta": {"total": 6},
96 | }
97 |
98 | params = {"per_page": 2, "page": 2}
99 | responses.add(
100 | responses.GET,
101 | f"{mock_client_url}/v2/tags",
102 | json=expected,
103 | match=[matchers.query_param_matcher(params)],
104 | )
105 | tags = mock_client.tags.list(per_page=2, page=2)
106 |
107 | assert tags == expected
108 |
109 |
110 | @responses.activate
111 | def test_get_tag(mock_client: Client, mock_client_url):
112 | """Mocks the tags get operation."""
113 | expected = {
114 | "tag": {
115 | "name": "example-tag",
116 | "resources": {
117 | "count": 1,
118 | "last_tagged_uri": "https://api.digitalocean.com/v2/images/1234",
119 | "droplets": {"count": 0},
120 | "images": {
121 | "count": 1,
122 | "last_tagged_uri": "https://api.digitalocean.com/v2/images/1234",
123 | },
124 | "volumes": {"count": 0},
125 | "volume_snapshots": {"count": 0},
126 | "databases": {"count": 0},
127 | },
128 | }
129 | }
130 |
131 | responses.add(
132 | responses.GET, f"{mock_client_url}/v2/tags/example-tag", json=expected
133 | )
134 | tags = mock_client.tags.get(tag_id="example-tag")
135 |
136 | assert tags == expected
137 |
138 |
139 | @responses.activate
140 | def test_create_tag(mock_client: Client, mock_client_url):
141 | """Mocks the tags create operation."""
142 | expected = {
143 | "tag": {
144 | "name": "example-tag",
145 | "resources": {
146 | "count": 0,
147 | "droplets": {"count": 0},
148 | "images": {"count": 0},
149 | "volumes": {"count": 0},
150 | "volume_snapshots": {"count": 0},
151 | "databases": {"count": 0},
152 | },
153 | }
154 | }
155 |
156 | responses.add(
157 | responses.POST,
158 | f"{mock_client_url}/v2/tags",
159 | json=expected,
160 | status=201,
161 | )
162 | tag = mock_client.tags.create(body={"name": "example-tag"})
163 |
164 | assert tag == expected
165 |
166 |
167 | @responses.activate
168 | def test_delete_tag(mock_client: Client, mock_client_url):
169 | """Mocks the tags delete operation."""
170 | responses.add(
171 | responses.DELETE,
172 | f"{mock_client_url}/v2/tags/example-tag",
173 | status=204,
174 | )
175 |
176 | mock_client.tags.delete(tag_id="example-tag")
177 |
178 |
179 | @responses.activate
180 | def test_assign_resources(mock_client: Client, mock_client_url):
181 | """Mocks the tags assign resources operation."""
182 |
183 | responses.add(
184 | responses.POST,
185 | f"{mock_client_url}/v2/tags/example-tag/resources",
186 | status=204,
187 | )
188 |
189 | req = {
190 | "resources": [
191 | {"resource_id": "1234", "resource_type": "droplet"},
192 | {"resource_id": "5678", "resource_type": "image"},
193 | {"resource_id": "aaa-bbb-ccc-111", "resource_type": "volume"},
194 | ]
195 | }
196 |
197 | mock_client.tags.assign_resources(tag_id="example-tag", body=req)
198 |
199 |
200 | @responses.activate
201 | def test_unassign_resources(mock_client: Client, mock_client_url):
202 | """Mocks the tags unassign resources operation."""
203 |
204 | responses.add(
205 | responses.DELETE,
206 | f"{mock_client_url}/v2/tags/example-tag/resources",
207 | status=204,
208 | )
209 |
210 | req = {
211 | "resources": [
212 | {"resource_id": "1234", "resource_type": "droplet"},
213 | {"resource_id": "5678", "resource_type": "image"},
214 | {"resource_id": "aaa-bbb-ccc-111", "resource_type": "volume"},
215 | ]
216 | }
217 |
218 | mock_client.tags.unassign_resources(tag_id="example-tag", body=req)
219 |
--------------------------------------------------------------------------------
/tests/mocked/test_vpcs.py:
--------------------------------------------------------------------------------
1 | # pylint: disable=duplicate-code
2 | """Mock tests for the VPCs resource"""
3 |
4 | import responses
5 |
6 | from pydo import Client
7 |
8 |
9 | @responses.activate
10 | def test_vpcs_create(mock_client: Client, mock_client_url):
11 | """Testing create a new VPC"""
12 |
13 | expected = {
14 | "vpc": {
15 | "name": "env.prod-vpc",
16 | "description": "VPC for production environment",
17 | "region": "nyc1",
18 | "ip_range": "10.10.10.0/24",
19 | "default": "true",
20 | "id": "5a4981aa-9653-4bd1-bef5-d6bff52042e4",
 21 |             "urn": "do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4",
22 | "created_at": "2020-03-13T19:20:47.442049222Z",
23 | }
24 | }
25 |
26 | responses.add(
27 | responses.POST,
28 | f"{mock_client_url}/v2/vpcs",
29 | json=expected,
30 | status=201,
31 | )
32 |
33 | create_resp = mock_client.vpcs.create(
34 | {
35 | "name": "env.prod-vpc",
36 | "description": "VPC for production environment",
37 | "region": "nyc1",
38 | "ip_range": "10.10.10.0/24",
39 | }
40 | )
41 |
42 | assert create_resp == expected
43 |
44 |
45 | @responses.activate
46 | def test_vpcs_list(mock_client: Client, mock_client_url):
47 | """Test VPCs List"""
48 | expected = {
49 | "vpcs": [
50 | {
51 | "name": "env.prod-vpc",
52 | "description": "VPC for production environment",
53 | "region": "nyc1",
54 | "ip_range": "10.10.10.0/24",
55 | "id": "5a4981aa-9653-4bd1-bef5-d6bff52042e4",
56 | "urn": "do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4",
57 | "default": "true",
58 | "created_at": "2020-03-13T19:20:47.442049222Z",
59 | },
60 | ],
61 | "links": {},
62 | "meta": {"total": 1},
63 | }
64 |
65 | responses.add(
66 | responses.GET,
67 | f"{mock_client_url}/v2/vpcs",
68 | json=expected,
69 | status=200,
70 | )
71 |
72 | list_resp = mock_client.vpcs.list()
73 |
74 | assert list_resp == expected
75 |
76 |
77 | @responses.activate
78 | def test_get_vpcs(mock_client: Client, mock_client_url):
79 | """Test VPCs Get"""
80 |
81 | expected = {
82 | "vpc": {
83 | "name": "env.prod-vpc",
84 | "description": "VPC for production environment",
85 | "region": "nyc1",
86 | "ip_range": "10.10.10.0/24",
 87 |             "default": "true",
 88 |             "id": "5a4981aa-9653-4bd1-bef5-d6bff52042e4",
 89 |             "urn": "do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4",
90 | "created_at": "2020-03-13T19:20:47.442049222Z",
91 | }
92 | }
93 |
94 | responses.add(
95 | responses.GET,
96 | f"{mock_client_url}/v2/vpcs/5a4981aa-9653-4bd1-bef5-d6bff52042e4",
97 | json=expected,
98 | status=200,
99 | )
100 |
101 | get_resp = mock_client.vpcs.get(vpc_id="5a4981aa-9653-4bd1-bef5-d6bff52042e4")
102 |
103 | assert get_resp == expected
104 |
105 |
106 | @responses.activate
107 | def test_update_vpcs(mock_client: Client, mock_client_url):
108 | """Test VPCs Update"""
109 | expected = {
110 | "vpc": {
111 | "name": "env.prod-vpc",
112 | "description": "VPC for production environment",
113 | "region": "nyc1",
114 | "ip_range": "10.10.10.0/24",
115 |             "default": "true",
116 |             "id": "5a4981aa-9653-4bd1-bef5-d6bff52042e4",
117 |             "urn": "do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4",
118 | "created_at": "2020-03-13T19:20:47.442049222Z",
119 | }
120 | }
121 |
122 | responses.add(
123 | responses.PUT,
124 | f"{mock_client_url}/v2/vpcs/5a4981aa-9653-4bd1-bef5-d6bff52042e4",
125 | json=expected,
126 | status=200,
127 | )
128 |
129 | update_resp = mock_client.vpcs.update(
130 | vpc_id="5a4981aa-9653-4bd1-bef5-d6bff52042e4",
131 | body={
132 | "name": "env.prod-vpc",
133 | "description": "VPC for production environment",
134 |             "default": "true",
135 | },
136 | )
137 |
138 | assert update_resp == expected
139 |
140 |
141 | @responses.activate
142 | def test_patch_vpcs(mock_client: Client, mock_client_url):
143 | """Test VPCs Patch Update"""
144 | expected = {
145 | "vpc": {
146 | "name": "env.prod-vpc",
147 | "description": "VPC for production environment",
148 | "region": "nyc1",
149 | "ip_range": "10.10.10.0/24",
150 | "default": "true",
151 | "id": "5a4981aa-9653-4bd1-bef5-d6bff52042e4",
152 |             "urn": "do:vpc:5a4981aa-9653-4bd1-bef5-d6bff52042e4",
153 | "created_at": "2020-03-13T19:20:47.442049222Z",
154 | }
155 | }
156 |
157 | responses.add(
158 | responses.PATCH,
159 | f"{mock_client_url}/v2/vpcs/5a4981aa-9653-4bd1-bef5-d6bff52042e4",
160 | json=expected,
161 | status=200,
162 | )
163 |
164 | patch_resp = mock_client.vpcs.patch(
165 | vpc_id="5a4981aa-9653-4bd1-bef5-d6bff52042e4",
166 | body={
167 | "name": "env.prod-vpc",
168 | "description": "VPC for production environment",
169 | "default": "true",
170 | },
171 | )
172 |
173 | assert patch_resp == expected
174 |
175 |
176 | @responses.activate
177 | def test_delete_vpcs(mock_client: Client, mock_client_url):
178 | """Test VPCs Delete"""
179 |
180 | responses.add(
181 | responses.DELETE,
182 | f"{mock_client_url}/v2/vpcs/5a4981aa-9653-4bd1-bef5-d6bff52042e4",
183 | status=204,
184 | )
185 |
186 | del_resp = mock_client.vpcs.delete(vpc_id="5a4981aa-9653-4bd1-bef5-d6bff52042e4")
187 |
188 | assert del_resp is None
189 |
190 |
191 | @responses.activate
192 | def test_vpcs_list_members(mock_client: Client, mock_client_url):
193 | """Test VPCs List Members"""
194 | expected = {
195 | "members": [
196 | {
197 | "urn": "do:loadbalancer:fb294d78-d193-4cb2-8737-ea620993591b",
198 | "name": "nyc1-load-balancer-01",
199 | "created_at": "2020-03-13T19:30:48Z",
200 | },
201 | {
202 | "urn": "do:dbaas:13f7a2f6-43df-4c4a-8129-8733267ddeea",
203 | "name": "db-postgresql-nyc1-55986",
204 | "created_at": "2020-03-13T19:30:18Z",
205 | },
206 | {
207 | "urn": "do:kubernetes:da39d893-96e1-4e4d-971d-1fdda33a46b1",
208 | "name": "k8s-nyc1-1584127772221",
209 | "created_at": "2020-03-13T19:30:16Z",
210 | },
211 | {
212 | "urn": "do:droplet:86e29982-03a7-4946-8a07-a0114dff8754",
213 | "name": "ubuntu-s-1vcpu-1gb-nyc1-01",
214 | "created_at": "2020-03-13T19:29:20Z",
215 | },
216 | ],
217 | "links": {},
218 | "meta": {"total": 4},
219 | }
220 |
221 | responses.add(
222 | responses.GET,
223 | f"{mock_client_url}/v2/vpcs/1/members",
224 | json=expected,
225 | status=200,
226 | )
227 |
228 | list_resp = mock_client.vpcs.list_members(vpc_id=1)
229 |
230 | assert list_resp == expected
231 |
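232 | # Illustrative sketch, not part of the original suite: the members listing
233 | # documents a `resource_type` query filter; the kwarg name below assumes pydo
234 | # exposes it directly as a query parameter. `strict_match=False` tolerates any
235 | # default pagination params the client may also send.
236 | @responses.activate
237 | def test_vpcs_list_members_filtered(mock_client: Client, mock_client_url):
238 |     """Test VPCs List Members filtered by resource type"""
239 |     expected = {"members": [], "links": {}, "meta": {"total": 0}}
240 | 
241 |     responses.add(
242 |         responses.GET,
243 |         f"{mock_client_url}/v2/vpcs/1/members",
244 |         json=expected,
245 |         match=[
246 |             responses.matchers.query_param_matcher(
247 |                 {"resource_type": "droplet"}, strict_match=False
248 |             )
249 |         ],
250 |     )
251 | 
252 |     list_resp = mock_client.vpcs.list_members(vpc_id=1, resource_type="droplet")
253 | 
254 |     assert list_resp == expected
255 | 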
--------------------------------------------------------------------------------