├── awspub ├── tests │ ├── __init__.py │ ├── test_cli.py │ ├── fixtures │ │ ├── config2-mapping.yaml │ │ ├── config1.vmdk │ │ ├── config-minimal.yaml │ │ ├── config-invalid-s3-extra.yaml │ │ ├── config-valid-nonawspub.yaml │ │ ├── config3-duplicate-keys.yaml │ │ ├── config2.yaml │ │ └── config1.yaml │ ├── test_image_marketplace.py │ ├── test_common.py │ ├── test_api.py │ ├── test_s3.py │ ├── test_context.py │ ├── test_snapshot.py │ └── test_sns.py ├── __init__.py ├── exceptions.py ├── common.py ├── sns.py ├── context.py ├── cli │ └── __init__.py ├── image_marketplace.py ├── api.py ├── snapshot.py ├── configmodels.py └── s3.py ├── docs ├── config-samples │ ├── image.vmdk │ ├── config-minimal-sns.yaml.mapping │ ├── config-minimal-ssm.yaml.mapping │ ├── config-with-parameters.yaml.mapping │ ├── config-minimal.yaml │ ├── config-minimal-public.yaml │ ├── config-minimal-tags.yaml │ ├── config-with-parameters.yaml │ ├── config-multiple-images.yaml │ ├── config-minimal-image-tags.yaml │ ├── config-minimal-share.yaml │ ├── config-minimal-groups.yaml │ ├── config-minimal-ssm.yaml │ ├── config-minimal-sns.yaml │ └── config-minimal-marketplace.yaml ├── how_to │ ├── api.rst │ ├── index.rst │ ├── install.rst │ └── publish.rst ├── reference │ ├── logic.rst │ ├── index.rst │ ├── config_models.rst │ └── architecture.rst └── conf.py ├── _static ├── tag.png ├── favicon.png ├── header-nav.js ├── footer.js ├── github_issue_links.css ├── footer.css ├── github_issue_links.js ├── css │ └── cookie-banner.css ├── 404.svg ├── header.css ├── furo_colors.css └── custom.css ├── .sphinx ├── pa11y.json ├── spellingcheck.yaml ├── get_vale_conf.py └── build_requirements.py ├── .custom_wordlist.txt ├── .wokeignore ├── _templates ├── base.html ├── sidebar │ └── search.html ├── 404.html ├── page.html ├── header.html └── footer.html ├── requirements_parser.py ├── .gitignore ├── reuse └── links.txt ├── .github ├── workflows │ ├── automatic-doc-checks.yml │ ├── pr.yaml │ ├── 
sphinx-python-dependency-build-checks.yml │ └── publish-to-pypi.yaml └── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── SECURITY.md ├── readme.rst ├── renovate.json ├── .wordlist.txt ├── index.rst ├── .readthedocs.yaml ├── release-management.rst ├── make.bat ├── tox.ini ├── snap └── snapcraft.yaml ├── pyproject.toml ├── Makefile ├── Makefile.sp ├── conf.py └── custom_conf.py /awspub/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /awspub/tests/test_cli.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/config-samples/image.vmdk: -------------------------------------------------------------------------------- 1 | ../../awspub/tests/fixtures/config1.vmdk -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-sns.yaml.mapping: -------------------------------------------------------------------------------- 1 | --- 2 | serial: 20171022 -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-ssm.yaml.mapping: -------------------------------------------------------------------------------- 1 | --- 2 | version: "2.0.0" -------------------------------------------------------------------------------- /_static/tag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/canonical/awspub/main/_static/tag.png -------------------------------------------------------------------------------- /awspub/tests/fixtures/config2-mapping.yaml: -------------------------------------------------------------------------------- 1 | key1: "value1" 2 | key2: "value$2" 3 | 
-------------------------------------------------------------------------------- /docs/config-samples/config-with-parameters.yaml.mapping: -------------------------------------------------------------------------------- 1 | --- 2 | serial: 20171022 -------------------------------------------------------------------------------- /_static/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/canonical/awspub/main/_static/favicon.png -------------------------------------------------------------------------------- /awspub/tests/fixtures/config1.vmdk: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/canonical/awspub/main/awspub/tests/fixtures/config1.vmdk -------------------------------------------------------------------------------- /awspub/__init__.py: -------------------------------------------------------------------------------- 1 | from awspub.api import cleanup, create, list, publish 2 | 3 | __all__ = ["create", "list", "publish", "cleanup"] 4 | -------------------------------------------------------------------------------- /.sphinx/pa11y.json: -------------------------------------------------------------------------------- 1 | { 2 | "chromeLaunchConfig": { 3 | "args": [ 4 | "--no-sandbox" 5 | ] 6 | }, 7 | "reporter": "cli", 8 | "standard": "WCAG2AA" 9 | } -------------------------------------------------------------------------------- /.custom_wordlist.txt: -------------------------------------------------------------------------------- 1 | AMI 2 | AMIs 3 | ARN 4 | ConfigModel 5 | GPL 6 | SSM 7 | Snapcraft 8 | Snapstore 9 | YAML 10 | awspub 11 | awspub's 12 | config 13 | https 14 | io 15 | readthedocs 16 | vmdk 17 | SNS 18 | -------------------------------------------------------------------------------- /.wokeignore: -------------------------------------------------------------------------------- 1 | # the cheat sheets 
contain a link to a repository with a block word which we 2 | # cannot avoid for now, ie 3 | # https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html 4 | doc-cheat-sheet* 5 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | -------------------------------------------------------------------------------- /awspub/tests/fixtures/config-minimal.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | awspub: 3 | source: 4 | path: "config1.vmdk" 5 | architecture: "x86_64" 6 | 7 | s3: 8 | bucket_name: "bucket1" 9 | 10 | images: 11 | "my-custom-image": 12 | boot_mode: "uefi-preferred" 13 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-public.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | public: true 11 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-tags.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | tags: 11 | tag-key: "tag-value" 12 | -------------------------------------------------------------------------------- /docs/config-samples/config-with-parameters.yaml: 
-------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image-$serial": 9 | boot_mode: "uefi-preferred" 10 | regions: 11 | - eu-central-1 12 | -------------------------------------------------------------------------------- /_static/header-nav.js: -------------------------------------------------------------------------------- 1 | $(document).ready(function() { 2 | $(document).on("click", function () { 3 | $(".more-links-dropdown").hide(); 4 | }); 5 | 6 | $('.nav-more-links').click(function(event) { 7 | $('.more-links-dropdown').toggle(); 8 | event.stopPropagation(); 9 | }); 10 | }) 11 | -------------------------------------------------------------------------------- /_templates/base.html: -------------------------------------------------------------------------------- 1 | {% extends "furo/base.html" %} 2 | 3 | {% block theme_scripts %} 4 | 7 | {% endblock theme_scripts %} 8 | 9 | {# ru-fu: don't include the color variables from the conf.py file, but use a 10 |  separate CSS file to save space #} 11 | {% block theme_styles %} 12 | {% endblock theme_styles %} 13 | -------------------------------------------------------------------------------- /_static/footer.js: -------------------------------------------------------------------------------- 1 | $(document).ready(function() { 2 | $(document).on("click", function () { 3 | $(".all-contributors").hide(); 4 | $("#overlay").hide(); 5 | }); 6 | 7 | $('.display-contributors').click(function(event) { 8 | $('.all-contributors').toggle(); 9 | $("#overlay").toggle(); 10 | event.stopPropagation(); 11 | }); 12 | }) 13 | -------------------------------------------------------------------------------- /awspub/tests/fixtures/config-invalid-s3-extra.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | s3: 3 | 
bucket_name: "bucket1" 4 | invalid_field: "not allowed" # This is an invalid field 5 | source: 6 | path: "config1.vmdk" 7 | architecture: "x86_64" 8 | images: 9 | test-image: 10 | description: "Test Image" 11 | separate_snapshot: "False" 12 | boot_mode: "uefi-preferred" 13 | -------------------------------------------------------------------------------- /docs/how_to/api.rst: -------------------------------------------------------------------------------- 1 | How to use the API 2 | ================== 3 | 4 | `awspub` provides a high-level API which can be used 5 | to create and publish images. 6 | 7 | Assuming there is a configuration file and a configuration file mapping: 8 | 9 | .. code-block:: 10 | 11 | import awspub 12 | awspub.create("config.yaml", "mapping.yaml") 13 | awspub.publish("config.yaml", "mapping.yaml") 14 | -------------------------------------------------------------------------------- /requirements_parser.py: -------------------------------------------------------------------------------- 1 | import tomllib 2 | 3 | LOCK = 'poetry.lock' 4 | 5 | 6 | def _get_requirements() -> list[str]: 7 | c = _read_config() 8 | 9 | deps = set() 10 | 11 | for dep in c["package"]: 12 | deps.add(dep['name']) 13 | 14 | return list(deps) 15 | 16 | 17 | def _read_config() -> dict: 18 | with open(LOCK, 'rb') as f: 19 | config = tomllib.load(f) 20 | return config 21 | -------------------------------------------------------------------------------- /awspub/tests/fixtures/config-valid-nonawspub.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | s3: 3 | bucket_name: "bucket1" 4 | source: 5 | path: "config1.vmdk" 6 | architecture: "x86_64" 7 | images: 8 | test-image: 9 | description: "Test Image" 10 | separate_snapshot: "False" 11 | boot_mode: "uefi-preferred" 12 | notawspub: # to make sure config outside of toplevel `awspub` dict is allowed 13 | foo_bar: "irrelevant" 14 | 
-------------------------------------------------------------------------------- /_templates/sidebar/search.html: -------------------------------------------------------------------------------- 1 | 7 | 8 | -------------------------------------------------------------------------------- /awspub/tests/fixtures/config3-duplicate-keys.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | s3: 3 | bucket_name: "bucket1" 4 | 5 | source: 6 | path: "config1.vmdk" 7 | architecture: "x86_64" 8 | 9 | images: 10 | "test-image-1": 11 | description: | 12 | A test image 13 | boot_mode: "uefi" 14 | # second image with the same key 15 | "test-image-1": 16 | description: | 17 | A test image 18 | boot_mode: "uefi" 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | *.egg-info/ 3 | .eggs/ 4 | *.pyc 5 | .tox 6 | *~ 7 | *.snap 8 | docs-build/ 9 | config.yaml 10 | .coverage 11 | 12 | # docs 13 | /*env*/ 14 | .sphinx/venv/ 15 | .sphinx/warnings.txt 16 | .sphinx/.wordlist.dic 17 | .sphinx/requirements.txt 18 | .sphinx/.doctrees/ 19 | .sphinx/node_modules/ 20 | package*.json 21 | _build 22 | .DS_Store 23 | __pycache__ 24 | .idea/ 25 | oci/_external/*.rst 26 | .vscode/ 27 | .sphinx/styles/* 28 | .sphinx/vale.ini 29 | -------------------------------------------------------------------------------- /reuse/links.txt: -------------------------------------------------------------------------------- 1 | .. _reStructuredText style guide: https://canonical-documentation-with-sphinx-and-readthedocscom.readthedocs-hosted.com/style-guide/ 2 | .. _Read the Docs at Canonical: https://library.canonical.com/documentation/read-the-docs 3 | .. _How to publish documentation on Read the Docs: https://library.canonical.com/documentation/publish-on-read-the-docs 4 | .. 
_Example product documentation: https://canonical-example-product-documentation.readthedocs-hosted.com/ 5 | -------------------------------------------------------------------------------- /.github/workflows/automatic-doc-checks.yml: -------------------------------------------------------------------------------- 1 | name: Main Documentation Checks 2 | 3 | on: 4 | - push 5 | - pull_request 6 | - workflow_dispatch 7 | 8 | concurrency: 9 | group: ${{ github.workflow }}-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | jobs: 13 | documentation-checks: 14 | uses: canonical/documentation-workflows/.github/workflows/documentation-checks.yaml@main 15 | with: 16 | working-directory: '.' 17 | python-version: '3.12' 18 | -------------------------------------------------------------------------------- /docs/config-samples/config-multiple-images.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | awspub: 3 | source: 4 | path: "image.vmdk" 5 | architecture: "x86_64" 6 | 7 | s3: 8 | bucket_name: "awspub-toabctl" 9 | 10 | images: 11 | "my-custom-image": 12 | boot_mode: "uefi-preferred" 13 | regions: 14 | - eu-central-1 15 | "my-custom-image-2": 16 | description: | 17 | Another image 18 | boot_mode: "uefi" 19 | regions: 20 | - eu-central-2 21 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-image-tags.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image-1": 9 | boot_mode: "uefi-preferred" 10 | tags: 11 | key1: "value1" 12 | "my-custom-image-2": 13 | boot_mode: "uefi" 14 | tags: 15 | key2: "value2" 16 | tag-key: "another-value" 17 | tags: 18 | tag-key: "tag-value" 19 | -------------------------------------------------------------------------------- /SECURITY.md: 
-------------------------------------------------------------------------------- 1 | # Reporting a vulnerability 2 | To report a security issue, file a [Private Security Report](https://github.com/Canonical/awspub/security/advisories/new) 3 | with a description of the issue, the steps you took to create the issue, 4 | affected versions, and, if known, mitigations for the issue. 5 | The [Ubuntu Security disclosure and embargo policy](https://ubuntu.com/security/disclosure-policy) 6 | contains more information about what you can expect when you contact us and what we expect from you. 7 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-share.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | share: 11 | - "123456789123" 12 | - "aws-cn:456789012345" 13 | - "arn:aws:organizations::123456789012:organization/o-123example" 14 | - "arn:aws-cn:organizations::334455667788:ou/o-123example/ou-1234-5example" 15 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-groups.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "arm64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image-1": 9 | boot_mode: "uefi-preferred" 10 | groups: 11 | - "group1" 12 | regions: 13 | - us-west-1 14 | "my-custom-image-2": 15 | boot_mode: "uefi" 16 | groups: 17 | - "group2" 18 | regions: 19 | - us-east-1 20 | - ca-central-1 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | 
--- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. 16 | 2. 17 | 3. 18 | 19 | **Expected behavior** 20 | A clear and concise description of what you expected to happen. 21 | 22 | **Additional context** 23 | Add any other context about the problem here. 24 | -------------------------------------------------------------------------------- /readme.rst: -------------------------------------------------------------------------------- 1 | awspub - image publication for AWS EC2 2 | -------------------------------------- 3 | 4 | `awspub` can publish images (AMIs) based on .VMDK files 5 | to AWS EC2. 6 | 7 | Documentation 8 | ============= 9 | 10 | The documentation can be found under https://canonical-awspub.readthedocs-hosted.com/ . 11 | 12 | Report issues 13 | ============= 14 | 15 | Please use https://github.com/canonical/awspub/issues to report problems or ask 16 | questions. 17 | 18 | License 19 | ======= 20 | 21 | The project uses `GPL-3.0` as license. 
22 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:recommended" 5 | ], 6 | "packageRules": [ 7 | { 8 | "groupName": "all non-major dependencies", 9 | "groupSlug": "all-minor-patch", 10 | "matchPackageNames": [ 11 | "*" 12 | ], 13 | "matchUpdateTypes": [ 14 | "minor", 15 | "patch" 16 | ], 17 | "matchCurrentVersion": "!/^0/", 18 | "automerge": true 19 | } 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-ssm.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | ssm_parameter: 11 | - 12 | name: "/awspub-test/my-custom-image/$version" 13 | description: | 14 | Version $version of my-custom-image 15 | - 16 | name: "/awspub-test/my-custom-image/latest" 17 | allow_overwrite: true 18 | description: | 19 | always latest version of my-custom-image 20 | -------------------------------------------------------------------------------- /_static/github_issue_links.css: -------------------------------------------------------------------------------- 1 | .github-issue-link-container { 2 | padding-right: 0.5rem; 3 | } 4 | .github-issue-link { 5 | font-size: var(--font-size--small); 6 | font-weight: bold; 7 | background-color: #D6410D; 8 | padding: 13px 23px; 9 | text-decoration: none; 10 | } 11 | .github-issue-link:link { 12 | color: #FFFFFF; 13 | } 14 | .github-issue-link:visited { 15 | color: #FFFFFF 16 | } 17 | .muted-link.github-issue-link:hover { 18 | color: #FFFFFF; 19 | text-decoration: underline; 20 | } 21 | .github-issue-link:active { 22 | 
color: #FFFFFF; 23 | text-decoration: underline; 24 | } 25 | -------------------------------------------------------------------------------- /docs/reference/logic.rst: -------------------------------------------------------------------------------- 1 | Reference for the API 2 | ===================== 3 | 4 | If in doubt, use the high level API. This 5 | should work well for most of the use cases. 6 | 7 | High-level API 8 | ++++++++++++++ 9 | 10 | .. automodule:: awspub 11 | :members: 12 | 13 | 14 | Low-level API 15 | +++++++++++++ 16 | 17 | .. automodule:: awspub.context 18 | :members: 19 | 20 | .. automodule:: awspub.s3 21 | :members: 22 | 23 | .. automodule:: awspub.snapshot 24 | :members: 25 | 26 | .. automodule:: awspub.image 27 | :members: 28 | 29 | .. automodule:: awspub.image_marketplace 30 | :members: 31 | 32 | .. automodule:: awspub.sns 33 | :members: 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | 22 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-sns.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | sns: 11 | - "my-topic1": 12 | subject: "my-topic1-subject" 13 | message: 14 | default: "This is default message" 15 | email: "This is message for email protocols." 16 | - "my-topic2": 17 | subject: "my-topic2-subject" 18 | message: 19 | default: "This is message for email protocols. New image $serial is available" 20 | regions: 21 | - us-east-1 22 | -------------------------------------------------------------------------------- /_templates/404.html: -------------------------------------------------------------------------------- 1 | {% extends "page.html" %} 2 | 3 | {% block content -%} 4 |
5 |

Page not found

6 |
7 |
8 |
9 | {{ body }} 10 |
11 |
12 | Penguin with a question mark 13 |
14 |
15 |
16 |
"""Exception types raised by awspub."""


class MultipleSnapshotsException(Exception):
    """Multiple snapshots matched where a single one was expected (inferred from name)."""

    pass


class MultipleImportSnapshotTasksException(Exception):
    """Multiple import-snapshot tasks matched where a single one was expected (inferred from name)."""

    pass


class MultipleImagesException(Exception):
    """Multiple images matched where a single one was expected (inferred from name)."""

    pass


class IncompleteImageSetException(Exception):
    """An expected set of images is not complete (inferred from name)."""

    pass


class BucketDoesNotExistException(Exception):
    """The named S3 bucket does not exist.

    The exception message embeds the bucket name and instructs the caller
    to create the bucket before proceeding.
    """

    def __init__(self, bucket_name: str, *args, **kwargs):
        # Build the user-facing message here so all raise sites get a
        # consistent text; extra args/kwargs are forwarded to Exception.
        msg = f"The bucket named '{bucket_name}' does not exist. You will need to create the bucket before proceeding."
        super().__init__(msg, *args, **kwargs)


class AWSNotificationException(Exception):
    """An AWS notification (SNS) operation failed (inferred from name)."""

    pass


class AWSAuthorizationException(Exception):
    """An AWS authorization check failed (inferred from name)."""

    pass
toctree:: 27 | :hidden: 28 | :maxdepth: 2 29 | :caption: Contents: 30 | 31 | install 32 | publish 33 | api 34 | -------------------------------------------------------------------------------- /.wordlist.txt: -------------------------------------------------------------------------------- 1 | # This wordlist is from the Sphinx starter pack and should not be 2 | # modified. Add any custom terms to .custom_wordlist.txt instead. 3 | 4 | addons 5 | API 6 | APIs 7 | balancer 8 | Charmhub 9 | CLI 10 | Diátaxis 11 | Dqlite 12 | dropdown 13 | EBS 14 | EKS 15 | enablement 16 | favicon 17 | Furo 18 | Git 19 | GitHub 20 | Grafana 21 | IAM 22 | installable 23 | JSON 24 | Juju 25 | Kubeflow 26 | Kubernetes 27 | Launchpad 28 | linter 29 | LTS 30 | Makefile 31 | Matrix 32 | Mattermost 33 | MyST 34 | namespace 35 | namespaces 36 | NodePort 37 | Numbat 38 | observability 39 | organization 40 | OEM 41 | OLM 42 | Permalink 43 | pre 44 | Quickstart 45 | ReadMe 46 | reST 47 | reStructuredText 48 | RTD 49 | subdirectories 50 | subfolders 51 | subtree 52 | Ubuntu 53 | UI 54 | UUID 55 | VM 56 | YAML 57 | -------------------------------------------------------------------------------- /docs/reference/index.rst: -------------------------------------------------------------------------------- 1 | Reference 2 | ========= 3 | 4 | Auto-generated code documentation and reference material. 5 | 6 | ---- 7 | 8 | In this documentation 9 | --------------------- 10 | 11 | .. grid:: 1 1 1 1 12 | :padding: 0 13 | 14 | .. grid-item:: :doc:`Architecture ` 15 | 16 | **Architecture** covers architectural decisions and considerations 17 | 18 | .. grid-item:: :doc:`Code ` 19 | 20 | **Code** Auto-generated reference pages for all functional classes and methods 21 | 22 | .. grid-item:: :doc:`ConfigModel Reference ` 23 | 24 | **ConfigModel** Auto-generated reference pages for config-file models 25 | 26 | .. 
toctree:: 27 | :hidden: 28 | :maxdepth: 2 29 | :caption: Contents: 30 | 31 | architecture 32 | config_models 33 | logic 34 | -------------------------------------------------------------------------------- /index.rst: -------------------------------------------------------------------------------- 1 | Welcome to awspub's documentation! 2 | ================================== 3 | 4 | `awspub `_ is a python module to publish AWS EC2 images. 5 | It handles image publication for a given source file (usually a .vmdk file). 6 | Only EBS backed images/AMIs are supported! 7 | 8 | 9 | ---- 10 | 11 | In this documentation 12 | --------------------- 13 | 14 | .. grid:: 1 1 1 1 15 | :padding: 0 16 | 17 | .. grid-item:: :doc:`How-To Guides ` 18 | 19 | **How-To** How-to guides to jump start your usage of 'awspub' 20 | 21 | .. grid-item:: :doc:`Reference ` 22 | 23 | **Reference** Auto generated code documentation and reference material 24 | 25 | .. toctree:: 26 | :hidden: 27 | :maxdepth: 2 28 | :caption: Contents: 29 | 30 | docs/how_to/index 31 | docs/reference/index 32 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | jobs: 14 | pre_install: 15 | - python3 .sphinx/build_requirements.py 16 | - git fetch --unshallow || true 17 | 18 | # Build documentation in the docs/ directory with Sphinx 19 | sphinx: 20 | builder: dirhtml 21 | configuration: conf.py 22 | fail_on_warning: true 23 | 24 | # If using Sphinx, optionally build your docs in additional formats such as PDF 25 | formats: 26 | - pdf 27 | 28 | # Optionally declare the Python 
requirements required to build your docs 29 | python: 30 | install: 31 | - requirements: .sphinx/requirements.txt 32 | -------------------------------------------------------------------------------- /.github/workflows/pr.yaml: -------------------------------------------------------------------------------- 1 | name: testing 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | tox: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python: ["3.10", "3.11", "3.12", "3.13"] 15 | steps: 16 | - name: Setup Python 17 | uses: actions/setup-python@v6 18 | with: 19 | python-version: ${{ matrix.python }} 20 | - name: Install tox and any other packages 21 | run: | 22 | pip3 install tox 23 | - uses: actions/checkout@v6 24 | - name: Run tox 25 | # Run tox using the version of Python in `PATH` 26 | run: tox 27 | snap: 28 | runs-on: ubuntu-latest 29 | steps: 30 | - uses: actions/checkout@v6 31 | with: 32 | fetch-tags: true 33 | fetch-depth: 0 34 | - uses: snapcore/action-build@v1 35 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath("../")) 5 | 6 | project = 'awspub' 7 | copyright = '2023, Thomas Bechtold' 8 | author = 'Thomas Bechtold' 9 | 10 | # -- General configuration --------------------------------------------------- 11 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 12 | 13 | extensions = [ 14 | 'sphinx.ext.autodoc', 15 | 'sphinxcontrib.autodoc_pydantic', 16 | ] 17 | 18 | templates_path = ['_templates'] 19 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 20 | 21 | 22 | autodoc_typehints = 'both' 23 | 24 | autodoc_pydantic_model_show_json = True 25 | 26 | # -- Options for HTML output ------------------------------------------------- 27 | # 
https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 28 | 29 | html_theme = 'alabaster' 30 | html_static_path = ['_static'] 31 | -------------------------------------------------------------------------------- /release-management.rst: -------------------------------------------------------------------------------- 1 | Doing a new release 2 | =================== 3 | 4 | New releases are mostly automated. A new github release (which includes 5 | creating a new git tag) can be done via the `github web UI `_. 6 | In `Choose a tag`, create the new one (with a `v` as prefix, so eg. `v0.1.1`) 7 | and as a `Release title`, use the same name as the tag (so eg. `v0.1.1`). 8 | 9 | pypi 10 | ---- 11 | 12 | New releases on pypi happen automatically when a new git tag gets 13 | created. The tag needs to be prefixed with `v` (eg. `v0.10.0`). 14 | 15 | snapstore 16 | --------- 17 | 18 | The latest git commit will be automatically build and published to the `latest/edge` 19 | channel. Promoting to `latest/stable` can be done with: 20 | 21 | .. code-block:: 22 | 23 | snapcraft promote --from-channel latest/edge --to-channel latest/stable awspub 24 | 25 | 26 | .. 
_new-release: https://github.com/canonical/awspub/releases/new 27 | -------------------------------------------------------------------------------- /.sphinx/spellingcheck.yaml: -------------------------------------------------------------------------------- 1 | matrix: 2 | - name: rST files 3 | aspell: 4 | lang: en 5 | d: en_GB 6 | dictionary: 7 | wordlists: 8 | - .wordlist.txt 9 | - .custom_wordlist.txt 10 | output: .sphinx/.wordlist.dic 11 | sources: 12 | - _build/**/*.html|!_build/genindex/*|!_build/docs/reference/config_models/index.html|!_build/docs/reference/logic/index.html|!_build/docs/reference/architecture/index.html 13 | spelling_ignore_pypi_package_names: true 14 | spelling_ignore_importable_modules: true 15 | pipeline: 16 | - pyspelling.filters.html: 17 | comments: false 18 | attributes: 19 | - title 20 | - alt 21 | ignores: 22 | - code 23 | - pre 24 | - spellexception 25 | - link 26 | - title 27 | - div.relatedlinks 28 | - strong.command 29 | - div.visually-hidden 30 | - img 31 | - a.p-navigation__link 32 | - a.contributor 33 | -------------------------------------------------------------------------------- /docs/config-samples/config-minimal-marketplace.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | source: 3 | path: "image.vmdk" 4 | architecture: "x86_64" 5 | s3: 6 | bucket_name: "awspub-toabctl" 7 | images: 8 | "my-custom-image": 9 | boot_mode: "uefi-preferred" 10 | marketplace: 11 | entity_id: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" 12 | access_role_arn: "arn:aws:iam::xxxxxxxxxxxx:role/AWSMarketplaceAccess" 13 | version_title: "1.0.0" 14 | release_notes: "N/A" 15 | user_name: "ubuntu" 16 | scanning_port: 22 17 | os_name: "UBUNTU" 18 | os_version: "24.04 LTS" 19 | usage_instructions: | 20 | You can use this custom image 21 | recommended_instance_type: "m6.large" 22 | security_groups: 23 | - 24 | from_port: 22 25 | ip_protocol: "tcp" 26 | ip_ranges: 27 | - "0.0.0.0/0" 28 | to_port: 22 29 | 
-------------------------------------------------------------------------------- /make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /_static/footer.css: -------------------------------------------------------------------------------- 1 | .display-contributors { 2 | color: var(--color-sidebar-link-text); 3 | cursor: pointer; 4 | } 5 | .all-contributors { 6 | display: none; 7 | z-index: 55; 8 | list-style: none; 9 | position: fixed; 10 | top: 0; 11 | bottom: 0; 12 | left: 0; 13 | right: 0; 14 | width: 200px; 15 | height: 200px; 16 | overflow-y: scroll; 17 | margin: auto; 18 | padding: 0; 19 | background: var(--color-background-primary); 20 | scrollbar-color: var(--color-foreground-border) transparent; 21 | scrollbar-width: thin; 22 | } 23 | 24 | .all-contributors li:hover { 25 | background: var(--color-sidebar-item-background--hover); 26 | width: 100%; 27 | } 28 | 29 | .all-contributors li a{ 30 | color: var(--color-sidebar-link-text); 31 | 
padding: 1rem; 32 | display: inline-block; 33 | } 34 | 35 | #overlay { 36 | position: fixed; 37 | display: none; 38 | width: 100%; 39 | height: 100%; 40 | top: 0; 41 | left: 0; 42 | right: 0; 43 | bottom: 0; 44 | background-color: rgba(0,0,0,0.5); 45 | z-index: 2; 46 | cursor: pointer; 47 | } 48 | -------------------------------------------------------------------------------- /_static/github_issue_links.js: -------------------------------------------------------------------------------- 1 | // if we already have an onload function, save that one 2 | var prev_handler = window.onload; 3 | 4 | window.onload = function() { 5 | // call the previous onload function 6 | if (prev_handler) { 7 | prev_handler(); 8 | } 9 | 10 | const link = document.createElement("a"); 11 | link.classList.add("muted-link"); 12 | link.classList.add("github-issue-link"); 13 | link.text = "Give feedback"; 14 | link.href = ( 15 | github_url 16 | + "/issues/new?" 17 | + "title=docs%3A+TYPE+YOUR+QUESTION+HERE" 18 | + "&body=*Please describe the question or issue you're facing with " 19 | + `"${document.title}"` 20 | + ".*" 21 | + "%0A%0A%0A%0A%0A" 22 | + "---" 23 | + "%0A" 24 | + `*Reported+from%3A+${location.href}*` 25 | ); 26 | link.target = "_blank"; 27 | 28 | const div = document.createElement("div"); 29 | div.classList.add("github-issue-link-container"); 30 | div.append(link) 31 | 32 | const container = document.querySelector(".article-container > .content-icon-container"); 33 | container.prepend(div); 34 | }; 35 | -------------------------------------------------------------------------------- /docs/reference/config_models.rst: -------------------------------------------------------------------------------- 1 | Reference for configuration 2 | =========================== 3 | 4 | This is the documentation for the pydantic models which are 5 | use for config validation. 6 | 7 | .. 
autopydantic_model:: awspub.configmodels.ConfigModel 8 | :model-show-json: True 9 | :model-show-config-summary: True 10 | 11 | .. autopydantic_model:: awspub.configmodels.ConfigS3Model 12 | :model-show-json: True 13 | :model-show-config-summary: True 14 | 15 | .. autopydantic_model:: awspub.configmodels.ConfigSourceModel 16 | :model-show-json: True 17 | :model-show-config-summary: True 18 | 19 | .. autopydantic_model:: awspub.configmodels.ConfigImageModel 20 | :model-show-json: True 21 | :model-show-config-summary: True 22 | 23 | .. autopydantic_model:: awspub.configmodels.ConfigImageMarketplaceModel 24 | :model-show-json: True 25 | :model-show-config-summary: True 26 | 27 | .. autopydantic_model:: awspub.configmodels.ConfigImageMarketplaceSecurityGroupModel 28 | :model-show-json: True 29 | :model-show-config-summary: True 30 | 31 | .. autopydantic_model:: awspub.configmodels.ConfigImageSNSNotificationModel 32 | :model-show-json: True 33 | :model-show-config-summary: True 34 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 4 3 | # Choose your Python versions. They have to be available 4 | # on the system the tests are run on. 
5 | envlist = py3,lint,type 6 | isolated_build = true 7 | 8 | [testenv] 9 | usedevelop=True 10 | setenv = 11 | PYTHONASYNCIODEBUG=1 12 | skip_install = true 13 | allowlist_externals = poetry 14 | deps = 15 | poetry 16 | commands_pre = 17 | poetry install --with test 18 | commands = 19 | poetry run coverage run -m pytest -s {posargs} --import-mode importlib 20 | poetry run coverage report -m 21 | 22 | [testenv:lint] 23 | commands_pre = 24 | poetry install --with test 25 | commands = 26 | poetry run flake8 {posargs} 27 | poetry run black --check --diff --line-length 120 awspub/ 28 | poetry run isort --check awspub/ 29 | 30 | [testenv:type] 31 | commands_pre = 32 | poetry install --with type 33 | commands = 34 | poetry run mypy awspub 35 | 36 | [flake8] 37 | show-source = True 38 | exclude = .venv,.tox,dist,doc,build,*.egg,node_modules,.sphinx,custom_conf.py,conf.py,tomllib 39 | max-line-length = 120 40 | 41 | [testenv:venv] 42 | commands = {posargs} 43 | 44 | [testenv:format] 45 | commands_pre = 46 | poetry install --with test 47 | commands = 48 | poetry run black --line-length 120 awspub 49 | poetry run isort awspub 50 | -------------------------------------------------------------------------------- /docs/reference/architecture.rst: -------------------------------------------------------------------------------- 1 | Architecture 2 | ============ 3 | 4 | `awspub` has some architectural choices which are important to know: 5 | 6 | * image names need to be unique within a used account. `awspub` does 7 | detect if an image already exists by querying available images 8 | by name. If the name doesn't exist, it will create the image. If the 9 | name exists, it will use that image. If the name exist multiple times 10 | it will throw an exception. 11 | * images will not be modified. 
so if a configuration changes its parameters 12 | for an image and the image already exists, the parameters will not 13 | be changed on EC2 (also most parameters can't be changed anyway for an 14 | image on EC2). 15 | * snapshots are tracked by a sha256sum of the underlying source file (usually 16 | a .vmdk file). Some configration parameters (`separate_snapshot` and 17 | `billing_products`) do adjust that sha256sum to make it unique for the 18 | combination of source .vmdk file and config options. 19 | * only EBS (no instance-store) and HVM (no PV) are supported. 20 | * S3 uploads are using a multipart upload so interrupted uploads can be retried 21 | but it also means that interrupted updates need to be cleanup up (best via a 22 | `bucket lifecycle config `_ 23 | -------------------------------------------------------------------------------- /awspub/tests/fixtures/config2.yaml: -------------------------------------------------------------------------------- 1 | awspub: 2 | s3: 3 | bucket_name: "bucket1" 4 | 5 | source: 6 | # config1.vmdk generated with 7 | # dd if=/dev/zero of=config1.raw bs=1K count=1 8 | # qemu-img convert -f raw -O vmdk -o subformat=streamOptimized config1.raw config1.vmdk 9 | path: "config1.vmdk" 10 | architecture: "x86_64" 11 | 12 | images: 13 | "test-image-$key1": 14 | description: | 15 | A test image 16 | boot_mode: "uefi" 17 | regions: 18 | - region1 19 | - region2 20 | temporary: true 21 | "test-image-$key2": 22 | description: | 23 | A test image with a separate snapshot 24 | boot_mode: "uefi" 25 | separate_snapshot: true 26 | "test-image-3": 27 | description: | 28 | A test image with a separate snapshot and a billing code 29 | boot_mode: "uefi" 30 | separate_snapshot: true 31 | billing_products: 32 | - billingcode 33 | "test-image-4": 34 | description: | 35 | A test image without a separate snapshot but a billing product 36 | boot_mode: "uefi-preferred" 37 | billing_products: 38 | - billingcode 39 | "test-image-5": 40 | description: | 
41 | A test image without a separate snapshot but multiple billing products 42 | boot_mode: "uefi-preferred" 43 | billing_products: 44 | - billingcode1 45 | - billingcode2 46 | 47 | tags: 48 | name: "foobar" 49 | -------------------------------------------------------------------------------- /_templates/page.html: -------------------------------------------------------------------------------- 1 | {% extends "furo/page.html" %} 2 | 3 | {% block footer %} 4 | {% include "footer.html" %} 5 | {% endblock footer %} 6 | 7 | {% block body -%} 8 | {% include "header.html" %} 9 | {{ super() }} 10 | {%- endblock body %} 11 | 12 | {% if meta and ((meta.discourse and discourse_prefix) or meta.relatedlinks) %} 13 | {% set furo_hide_toc_orig = furo_hide_toc %} 14 | {% set furo_hide_toc=false %} 15 | {% endif %} 16 | 17 | {% block right_sidebar %} 18 |
19 | {% if not furo_hide_toc_orig %} 20 |
21 | 22 | {{ _("Contents") }} 23 | 24 |
25 |
26 |
27 | {{ toc }} 28 |
29 |
30 | {% endif %} 31 | {% if meta and ((meta.discourse and discourse_prefix) or meta.relatedlinks) %} 32 | 37 | 47 | {% endif %} 48 |
49 | {% endblock right_sidebar %} 50 | -------------------------------------------------------------------------------- /docs/how_to/install.rst: -------------------------------------------------------------------------------- 1 | How to install awspub 2 | ===================== 3 | 4 | Setup profile configuration 5 | --------------------------- 6 | 7 | Before using this tool, you need to setup the AWS configuration and credential files. Follow 8 | `CLI configuration documentation`_ to create these two files. 9 | 10 | Example config file: 11 | 12 | .. code-block:: 13 | 14 | $ cat ~/.aws/config 15 | [default] 16 | region = us-east-1 17 | 18 | Example credential file: 19 | 20 | .. code-block:: 21 | 22 | $ cat ~/.aws/credentials 23 | [default] 24 | aws_secret_access_key = 25 | aws_access_key_id = 26 | 27 | Install awspub using snap 28 | ------------------------- 29 | 30 | `awspub` is available in the `Snapstore`_, and it can be installed using: 31 | 32 | .. code-block:: 33 | 34 | snap install awspub 35 | 36 | This will install the latest version in your machine. We would highly recommend you install the latest version, but 37 | refer to this `Snapcraft channel doc`_ for installing a different version or from a specific channel. 38 | 39 | CLI usage 40 | --------- 41 | 42 | The command line interface called ``awspub`` accepts the standard AWS environment variables such as `AWS_PROFILE`. 43 | 44 | .. _`CLI configuration documentation`: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html#cli-configure-files-using-profiles 45 | .. _`Snapstore`: https://snapcraft.io/awspub 46 | .. _`snapcraft channel doc`: https://snapcraft.io/docs/channels 47 | -------------------------------------------------------------------------------- /.sphinx/get_vale_conf.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import requests 4 | import os 5 | 6 | DIR=os.getcwd() 7 | 8 | def main(): 9 | 10 | if os.path.exists(f"{DIR}/.sphinx/styles/Canonical"): 11 | print("Vale directory exists") 12 | else: 13 | os.makedirs(f"{DIR}/.sphinx/styles/Canonical") 14 | 15 | url = "https://api.github.com/repos/canonical/praecepta/contents/styles/Canonical" 16 | r = requests.get(url) 17 | for item in r.json(): 18 | download = requests.get(item["download_url"]) 19 | file = open(".sphinx/styles/Canonical/" + item["name"], "w") 20 | file.write(download.text) 21 | file.close() 22 | 23 | if os.path.exists(f"{DIR}/.sphinx/styles/config/vocabularies/Canonical"): 24 | print("Vocab directory exists") 25 | else: 26 | os.makedirs(f"{DIR}/.sphinx/styles/config/vocabularies/Canonical") 27 | 28 | url = "https://api.github.com/repos/canonical/praecepta/contents/styles/config/vocabularies/Canonical" 29 | r = requests.get(url) 30 | for item in r.json(): 31 | download = requests.get(item["download_url"]) 32 | file = open(".sphinx/styles/config/vocabularies/Canonical/" + item["name"], "w") 33 | file.write(download.text) 34 | file.close() 35 | config = requests.get("https://raw.githubusercontent.com/canonical/praecepta/main/vale.ini") 36 | file = open(".sphinx/vale.ini", "w") 37 | file.write(config.text) 38 | file.close() 39 | 40 | if __name__ == "__main__": 41 | main() 42 | -------------------------------------------------------------------------------- /snap/snapcraft.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: awspub 3 | summary: A tool to aid in the publication of aws images 4 | description: | 5 | A tool to aid in the publication of AWS EC2 images. 6 | 7 | Credentials in ~/.aws/ need to be configured to be 8 | able to use this tool. 9 | 10 | Note: this snap is provided and maintained by Canonical! 
11 | (**not** by Amazon/AWS) 12 | version: git 13 | adopt-info: awspub 14 | base: core24 15 | confinement: strict 16 | license: GPL-3.0 17 | issues: https://github.com/canonical/awspub/issues 18 | website: https://canonical-awspub.readthedocs-hosted.com/ 19 | platforms: 20 | amd64: 21 | build-on: [amd64] 22 | arm64: 23 | build-on: [arm64] 24 | 25 | plugs: 26 | dot-aws-config: 27 | interface: personal-files 28 | read: 29 | - $HOME/.aws/config 30 | dot-aws-credentials: 31 | interface: personal-files 32 | read: 33 | - $HOME/.aws/credentials 34 | dot-aws-models: 35 | interface: personal-files 36 | read: 37 | - $HOME/.aws/models 38 | 39 | apps: 40 | awspub: 41 | command: bin/awspub 42 | environment: 43 | # need to set $HOME to the real HOME here because this is a strict snap 44 | # and the creds for aws are in $HOME/.aws 45 | HOME: $SNAP_REAL_HOME 46 | plugs: 47 | - home 48 | - network 49 | - dot-aws-config 50 | - dot-aws-credentials 51 | - dot-aws-models 52 | 53 | parts: 54 | awspub: 55 | plugin: poetry 56 | source: . 
57 | build-packages: 58 | - git 59 | override-pull: | 60 | craftctl default 61 | craftctl set version=$(git describe --tags) 62 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning>=1.0.0,<2.0.0"] 3 | build-backend = "poetry_dynamic_versioning.backend" 4 | 5 | [tool.poetry] 6 | name = "awspub" 7 | version = "0.0.0" 8 | description = "Publish images to AWS EC2" 9 | 10 | license = "GPL-3.0-or-later" 11 | 12 | authors = [ 13 | "Thomas Bechtold " 14 | ] 15 | 16 | repository = "https://github.com/canonical/awspub" 17 | homepage = "https://github.com/canonical/awspub" 18 | readme = ["readme.rst"] 19 | keywords = ["AWS", "EC2", "publication"] 20 | 21 | 22 | [tool.poetry.dependencies] 23 | python = "^3.10" 24 | boto3 = "*" 25 | pydantic = "^2" 26 | boto3-stubs = {extras = ["essential", "marketplace-catalog", "ssm", "s3", "sns", "sts"], version = "^1.28.83"} 27 | autodoc-pydantic = "^2.0.1" 28 | ruamel-yaml = "^0.18.6" 29 | 30 | 31 | [tool.poetry.group.test] 32 | optional = true 33 | 34 | [tool.poetry.group.test.dependencies] 35 | flake8 = "*" 36 | black = "*" 37 | pytest = "*" 38 | coverage = "^7.4.1" 39 | isort = "^7.0.0" 40 | 41 | [tool.poetry.group.type] 42 | optional = true 43 | 44 | [tool.poetry.group.type.dependencies] 45 | mypy = "*" 46 | pytest = "*" 47 | 48 | [tool.poetry.group.doc] 49 | optional = true 50 | 51 | [tool.poetry.group.doc.dependencies] 52 | Sphinx = "*" 53 | pydantic-settings = "*" 54 | autodoc_pydantic = "*" 55 | 56 | 57 | [tool.poetry.scripts] 58 | awspub = "awspub.cli:main" 59 | 60 | [tool.poetry-dynamic-versioning] 61 | enable = true 62 | vcs = 'git' 63 | 64 | [tool.isort] 65 | profile = "black" 66 | 67 | [tool.black] 68 | line-length = 120 69 | -------------------------------------------------------------------------------- 
/.github/workflows/sphinx-python-dependency-build-checks.yml: -------------------------------------------------------------------------------- 1 | # The purpose of this workflow file is to confirm that the Sphinx 2 | # virtual environment can be built from source, consequently documenting 3 | # the packages required in the build environment to do that. 4 | # 5 | # This is needed because some projects embeds the documentation into built 6 | # artifacts which involves rendering the documentation on the target 7 | # architecture. 8 | # 9 | # Depending on the architecture, pip may or may not have already built wheels 10 | # available, and as such we need to make sure building wheels from source can 11 | # succeed. 12 | name: Check and document build requirements for Sphinx venv 13 | on: 14 | - push 15 | - pull_request 16 | - workflow_dispatch 17 | 18 | 19 | concurrency: 20 | group: ${{ github.workflow }}-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | build: 25 | name: build 26 | runs-on: ubuntu-latest 27 | 28 | steps: 29 | - name: Checkout code 30 | uses: actions/checkout@v6 31 | 32 | - name: Set up Python 33 | uses: actions/setup-python@v6 34 | with: 35 | python-version: '3.13' 36 | 37 | - name: Install dependencies 38 | run: | 39 | set -ex 40 | sudo apt -y install \ 41 | cargo \ 42 | libpython3-dev \ 43 | libxml2-dev \ 44 | libxslt1-dev \ 45 | make \ 46 | python3-venv \ 47 | rustc 48 | 49 | - name: Build Sphinx venv 50 | run: | 51 | set -ex 52 | make -f Makefile.sp \ 53 | sp-install \ 54 | PIPOPTS="--no-binary :all:" \ 55 | || ( cat .sphinx/venv/pip_install.log && exit 1 ) 56 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # This Makefile stub allows you to customize starter pack (SP) targets. 
2 | # Consider this file as a bridge between your project 3 | # and the starter pack's predefined targets that reside in Makefile.sp. 4 | # 5 | # You can add your own, non-SP targets here or override SP targets 6 | # to fit your project's needs. For example, you can define and use targets 7 | # named "install" or "run", but continue to use SP targets like "sp-install" 8 | # or "sp-run" when working on the documentation. 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @echo "\n" \ 13 | "------------------------------------------------------------- \n" \ 14 | "* watch, build and serve the documentation: make run \n" \ 15 | "* only build: make html \n" \ 16 | "* only serve: make serve \n" \ 17 | "* clean built doc files: make clean-doc \n" \ 18 | "* clean full environment: make clean \n" \ 19 | "* check links: make linkcheck \n" \ 20 | "* check spelling: make spelling \n" \ 21 | "* check spelling (without building again): make spellcheck \n" \ 22 | "* check inclusive language: make woke \n" \ 23 | "* check accessibility: make pa11y \n" \ 24 | "* check style guide compliance: make vale \n" \ 25 | "* check style guide compliance on target: make vale TARGET=* \n" \ 26 | "* other possible targets: make \n" \ 27 | "------------------------------------------------------------- \n" 28 | 29 | %: 30 | $(MAKE) -f Makefile.sp sp-$@ 31 | -------------------------------------------------------------------------------- /_static/css/cookie-banner.css: -------------------------------------------------------------------------------- 1 | /* Cookie policy styling WILL BE REMOVED when implementation of new theme with vanilla is implemented */ 2 | .cookie-policy { 3 | overflow: auto; 4 | top: 35%; 5 | z-index: 50; 6 | position: fixed; 7 | } 8 | 9 | dialog.cookie-policy { 10 | background-color: var(--color-code-background); 11 | color: var(--color-code-foreground); 12 | height: auto; 13 | max-height: 60vh; 14 | max-width: 40rem; 15 | padding: 0 
1rem 0 1rem; 16 | width: auto; 17 | } 18 | 19 | header.p-modal__header { 20 | margin-bottom: .5rem; 21 | } 22 | 23 | header.p-modal__header::after { 24 | background-color: #d9d9d9; 25 | content: ""; 26 | height: 1px; 27 | left: 0; 28 | margin-left: 1rem; 29 | margin-right: 1rem; 30 | position: absolute; 31 | right: 0; 32 | } 33 | 34 | h2#cookie-policy-title.p-modal__title { 35 | align-self: flex-end; 36 | font-size: 1.5rem; 37 | font-style: normal; 38 | font-weight: 275; 39 | line-height: 2rem; 40 | margin: 0 0 1.05rem 0; 41 | padding: 0.45rem 0 0 0; 42 | } 43 | 44 | .cookie-policy p { 45 | font-size: 1rem; 46 | line-height: 1.5rem; 47 | margin-top: 0; 48 | padding-top: .4rem; 49 | } 50 | 51 | .cookie-policy p a { 52 | text-decoration: none; 53 | color: var(--color-link); 54 | } 55 | .cookie-policy button { 56 | border-style: solid; 57 | border-width: 1.5px; 58 | cursor: pointer; 59 | display: inline-block; 60 | font-size: 1rem; 61 | font-weight: 400; 62 | justify-content: center; 63 | line-height: 1.5rem; 64 | padding: calc(.4rem - 1px) 1rem; 65 | text-align: center; 66 | text-decoration: none; 67 | transition-duration: .1s; 68 | transition-property: background-color,border-color; 69 | transition-timing-function: cubic-bezier(0.55,0.055,0.675,0.19); 70 | } 71 | 72 | .cookie-policy button { 73 | background-color: #fff; 74 | border-color: rgba(0,0,0,0.56); 75 | color: #000; 76 | } 77 | 78 | .cookie-policy .p-button--positive { 79 | background-color: #0e8420; 80 | border-color: #0e8420; 81 | color: #fff; 82 | } 83 | -------------------------------------------------------------------------------- /awspub/tests/test_image_marketplace.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | from unittest.mock import patch 3 | 4 | import pytest 5 | 6 | from awspub import context, image_marketplace 7 | 8 | curdir = pathlib.Path(__file__).parent.resolve() 9 | 10 | 11 | @pytest.mark.parametrize( 12 | 
"imagename,new_version,called_start_change_set", 13 | [ 14 | # same version that already exists 15 | ("test-image-8", "1.0.0", False), 16 | # new version 17 | ("test-image-8", "2.0.0", True), 18 | ], 19 | ) 20 | def test_image_marketplace_request_new_version(imagename, new_version, called_start_change_set): 21 | """ 22 | Test the request_new_version logic 23 | """ 24 | with patch("boto3.client") as bclient_mock: 25 | instance = bclient_mock.return_value 26 | instance.describe_entity.return_value = {"DetailsDocument": {"Versions": [{"VersionTitle": new_version}]}} 27 | ctx = context.Context(curdir / "fixtures/config1.yaml", None) 28 | img = image_marketplace.ImageMarketplace(ctx, imagename) 29 | img.request_new_version("ami-123") 30 | assert instance.start_change_set.called == called_start_change_set 31 | 32 | 33 | def test_image_marketplace_request_new_version_none_exists(): 34 | """ 35 | Test the request_new_version logic if no version exist already 36 | """ 37 | with patch("boto3.client") as bclient_mock: 38 | instance = bclient_mock.return_value 39 | instance.describe_entity.return_value = {"DetailsDocument": {}} 40 | ctx = context.Context(curdir / "fixtures/config1.yaml", None) 41 | img = image_marketplace.ImageMarketplace(ctx, "test-image-8") 42 | img.request_new_version("ami-123") 43 | assert instance.start_change_set.called is True 44 | 45 | 46 | @pytest.mark.parametrize( 47 | "name,expected", 48 | [ 49 | ("1.0.0", "1.0.0"), 50 | ("1.0.0 (testing)", "1.0.0 testing"), 51 | ("a sentence with spaces", "a sentence with spaces"), 52 | ("_+=.:@-", "_+=.:@-"), 53 | ("(parens) [brackets] |pipes|", "parens brackets pipes"), 54 | ], 55 | ) 56 | def test_changeset_name_sanitization(name, expected): 57 | assert image_marketplace.ImageMarketplace.sanitize_changeset_name(name) == expected 58 | -------------------------------------------------------------------------------- /_templates/header.html: 
-------------------------------------------------------------------------------- 1 | 76 | -------------------------------------------------------------------------------- /awspub/tests/test_common.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import patch 2 | 3 | import pytest 4 | 5 | from awspub.common import _get_regions, _split_partition 6 | 7 | 8 | @pytest.mark.parametrize( 9 | "input,expected_output", 10 | [ 11 | ("123456789123", ("aws", "123456789123")), 12 | ("aws:123456789123", ("aws", "123456789123")), 13 | ("aws-cn:123456789123", ("aws-cn", "123456789123")), 14 | ("aws-us-gov:123456789123", ("aws-us-gov", "123456789123")), 15 | ( 16 | "arn:aws:organizations::123456789012:organization/o-123example", 17 | ("aws", "arn:aws:organizations::123456789012:organization/o-123example"), 18 | ), 19 | ( 20 | "arn:aws:organizations::123456789012:ou/o-123example/ou-1234-5example", 21 | ("aws", "arn:aws:organizations::123456789012:ou/o-123example/ou-1234-5example"), 22 | ), 23 | ( 24 | "arn:aws-cn:organizations::123456789012:organization/o-123example", 25 | ("aws-cn", "arn:aws-cn:organizations::123456789012:organization/o-123example"), 26 | ), 27 | ( 28 | "arn:aws-cn:organizations::123456789012:ou/o-123example/ou-1234-5example", 29 | ("aws-cn", "arn:aws-cn:organizations::123456789012:ou/o-123example/ou-1234-5example"), 30 | ), 31 | ( 32 | "arn:aws-us-gov:organizations::123456789012:organization/o-123example", 33 | ("aws-us-gov", "arn:aws-us-gov:organizations::123456789012:organization/o-123example"), 34 | ), 35 | ( 36 | "arn:aws-us-gov:organizations::123456789012:ou/o-123example/ou-1234-5example", 37 | ("aws-us-gov", "arn:aws-us-gov:organizations::123456789012:ou/o-123example/ou-1234-5example"), 38 | ), 39 | ], 40 | ) 41 | def test_common__split_partition(input, expected_output): 42 | assert _split_partition(input) == expected_output 43 | 44 | 45 | @pytest.mark.parametrize( 46 | 
"regions_in_partition,configured_regions,expected_output", 47 | [ 48 | (["region-1", "region-2"], ["region-1", "region-3"], ["region-1"]), 49 | (["region-1", "region-2", "region-3"], ["region-4", "region-5"], []), 50 | (["region-1", "region-2"], [], ["region-1", "region-2"]), 51 | ], 52 | ) 53 | def test_common__get_regions(regions_in_partition, configured_regions, expected_output): 54 | with patch("boto3.client") as bclient_mock: 55 | instance = bclient_mock.return_value 56 | instance.describe_regions.return_value = {"Regions": [{"RegionName": r} for r in regions_in_partition]} 57 | 58 | assert _get_regions("", configured_regions) == expected_output 59 | -------------------------------------------------------------------------------- /awspub/common.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import List, Tuple 3 | 4 | import boto3 5 | from mypy_boto3_ec2.client import EC2Client 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | 10 | def _split_partition(val: str) -> Tuple[str, str]: 11 | """ 12 | Split a string into partition and resource, separated by a colon. 
If no partition is given, assume "aws" 13 | :param val: the string to split 14 | :type val: str 15 | :return: the partition and the resource 16 | :rtype: Tuple[str, str] 17 | """ 18 | 19 | # ARNs encode partition https://docs.aws.amazon.com/IAM/latest/UserGuide/reference-arns.html 20 | if val.startswith("arn:"): 21 | arn, partition, resource = val.split(":", maxsplit=2) 22 | # Return extracted partition, but keep full ARN intact 23 | return partition, val 24 | 25 | # Partition prefix 26 | if ":" in val and val.startswith("aws"): 27 | partition, resource = val.split(":", maxsplit=1) 28 | return partition, resource 29 | 30 | # if no partition is given, assume default commercial partition "aws" 31 | return "aws", val 32 | 33 | 34 | def _get_regions(region_to_query: str, regions_allowlist: List[str]) -> List[str]: 35 | """ 36 | Get a list of region names querying the `region_to_query` for all regions and 37 | then filtering by `regions_allowlist`. 38 | If no `regions_allowlist` is given, all queried regions are returned for the 39 | current partition. 40 | If `regions_allowlist` is given, all regions from that list are returned if 41 | the listed region exist in the current partition. 42 | Eg. `us-east-1` listed in `regions_allowlist` won't be returned if the current 43 | partition is `aws-cn`. 
import pathlib

import pytest

from awspub import api, context, image

curdir = pathlib.Path(__file__).parent.resolve()


@pytest.mark.parametrize(
    "group,expected_image_names",
    [
        # without any group, all images should be processed
        (
            None,
            [
                "test-image-1",
                "test-image-2",
                "test-image-3",
                "test-image-4",
                "test-image-5",
                "test-image-6",
                "test-image-7",
                "test-image-8",
                "test-image-9",
                "test-image-10",
                "test-image-11",
                "test-image-12",
            ],
        ),
        # with a group that no image has, no image should be processed
        (
            "group-not-used",
            [],
        ),
        # with a group that an image has
        (
            "group2",
            ["test-image-1"],
        ),
        # with a group that multiple images have
        (
            "group1",
            ["test-image-1", "test-image-2"],
        ),
    ],
)
def test_api__images_filtered(group, expected_image_names):
    """
    Test the _images_filtered() function

    _images_filtered() must yield only the images from the config that
    match the given group (all images when group is None). The yielded
    tuples carry the image name as first element, which is what we
    compare here.
    """
    ctx = context.Context(curdir / "fixtures/config1.yaml", None)

    image_names = [i[0] for i in api._images_filtered(ctx, group)]
    assert image_names == expected_image_names


@pytest.mark.parametrize(
    "group,expected",
    [
        # without any group, all images should be processed
        (
            None,
            (
                {"test-image-1": {"eu-central-1": "ami-123", "eu-central-2": "ami-456"}},
                {
                    "group1": {"test-image-1": {"eu-central-1": "ami-123", "eu-central-2": "ami-456"}},
                    "group2": {"test-image-1": {"eu-central-1": "ami-123", "eu-central-2": "ami-456"}},
                },
            ),
        ),
        # with a group that no image has, image should be there but nothing in the group
        ("group-not-used", ({"test-image-1": {"eu-central-1": "ami-123", "eu-central-2": "ami-456"}}, {})),
    ],
)
def test_api__images_grouped(group, expected):
    """
    Test the _images_grouped() function

    _images_grouped() must return a 2-tuple: a mapping of image name to
    per-region image ids, and a mapping of group name to that same
    structure restricted to the requested group.
    """
    ctx = context.Context(curdir / "fixtures/config1.yaml", None)
    images = [
        (
            "test-image-1",
            image.Image(ctx, "test-image-1"),
            {"eu-central-1": image._ImageInfo("ami-123", None), "eu-central-2": image._ImageInfo("ami-456", None)},
        )
    ]
    grouped = api._images_grouped(images, group)
    assert grouped == expected
5 | on: push 6 | 7 | jobs: 8 | build: 9 | name: Build distribution 📦 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v6 14 | - name: Set up Python 15 | uses: actions/setup-python@v6 16 | with: 17 | python-version: "3.x" 18 | - name: Install pypa/build 19 | run: >- 20 | python3 -m 21 | pip install 22 | build 23 | --user 24 | - name: Build a binary wheel and a source tarball 25 | run: python3 -m build 26 | - name: Store the distribution packages 27 | uses: actions/upload-artifact@v6 28 | with: 29 | name: python-package-distributions 30 | path: dist/ 31 | 32 | 33 | publish-to-pypi: 34 | name: >- 35 | Publish Python 🐍 distribution 📦 to PyPI 36 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes 37 | needs: 38 | - build 39 | runs-on: ubuntu-latest 40 | environment: 41 | name: pypi 42 | url: https://pypi.org/p/awspub 43 | permissions: 44 | id-token: write # IMPORTANT: mandatory for trusted publishing 45 | 46 | steps: 47 | - name: Download all the dists 48 | uses: actions/download-artifact@v7 49 | with: 50 | name: python-package-distributions 51 | path: dist/ 52 | - name: Publish distribution 📦 to PyPI 53 | uses: pypa/gh-action-pypi-publish@release/v1 54 | 55 | 56 | github-release: 57 | name: >- 58 | Sign the Python 🐍 distribution 📦 with Sigstore 59 | and upload them to GitHub Release 60 | needs: 61 | - publish-to-pypi 62 | runs-on: ubuntu-latest 63 | 64 | permissions: 65 | contents: write # IMPORTANT: mandatory for making GitHub Releases 66 | id-token: write # IMPORTANT: mandatory for sigstore 67 | 68 | steps: 69 | - name: Download all the dists 70 | uses: actions/download-artifact@v7 71 | with: 72 | name: python-package-distributions 73 | path: dist/ 74 | - name: Sign the dists with Sigstore 75 | uses: sigstore/gh-action-sigstore-python@v3.0.1 76 | with: 77 | inputs: >- 78 | ./dist/*.tar.gz 79 | ./dist/*.whl 80 | - name: Create GitHub Release 81 | env: 82 | GITHUB_TOKEN: ${{ github.token }} 83 | run: >- 84 | gh 
import pathlib
from unittest.mock import patch

import pytest

from awspub import context, s3
from awspub.exceptions import BucketDoesNotExistException

curdir = pathlib.Path(__file__).parent.resolve()


@pytest.mark.parametrize(
    "list_multipart_uploads_resp,create_multipart_upload_called",
    [
        # no available uploads - create one
        ([], True),
        # one available upload with non-matching key
        ([{"UploadId": "abc", "Key": "does-not-match"}], True),
        # multiple available upload with non-matching key
        ([{"UploadId": "abc", "Key": "does-not-match"}, {"UploadId": "def", "Key": "does-not-match2"}], True),
        # one available upload with matching key
        ([{"UploadId": "abc", "Key": "6252475408b9f9ee64452b611d706a078831a99b123db69d144d878a0488a0a8"}], False),
        # multiple available upload with one matching key
        (
            [
                {"UploadId": "abc", "Key": "6252475408b9f9ee64452b611d706a078831a99b123db69d144d878a0488a0a8"},
                {"UploadId": "abc", "Key": "does-not-match"},
            ],
            False,
        ),
        # multiple available upload with multiple matching keys
        (
            [
                {"UploadId": "abc", "Key": "6252475408b9f9ee64452b611d706a078831a99b123db69d144d878a0488a0a8"},
                {"UploadId": "def", "Key": "6252475408b9f9ee64452b611d706a078831a99b123db69d144d878a0488a0a8"},
            ],
            False,
        ),
    ],
)
def test_s3__get_multipart_upload_id(list_multipart_uploads_resp, create_multipart_upload_called):
    """
    test the _get_multipart_upload_id() function

    A new multipart upload must only be created when no existing upload
    already uses the source sha256 as its key.
    """

    with patch("boto3.client") as bclient_mock:
        instance = bclient_mock.return_value
        instance.list_multipart_uploads.return_value = {"Uploads": list_multipart_uploads_resp}
        ctx = context.Context(curdir / "fixtures/config1.yaml", None)
        sthree = s3.S3(ctx)
        sthree._get_multipart_upload_id()
        assert instance.create_multipart_upload.called == create_multipart_upload_called


@patch("awspub.s3.S3._bucket_exists", return_value=True)
@patch("awspub.s3.boto3")
def test_s3_bucket_region_bucket_exists(boto3_mock, bucket_exists_mock):
    """
    test the S3.bucket_region property when the bucket exists

    The region reported by head_bucket must be exposed via the property.
    """
    region_name = "sample-region-1"
    head_bucket = {"BucketRegion": region_name}
    boto3_mock.client.return_value.head_bucket.return_value = head_bucket
    ctx = context.Context(curdir / "fixtures/config1.yaml", None)
    sthree = s3.S3(ctx)

    assert sthree.bucket_region == region_name


@patch("awspub.s3.S3._bucket_exists", return_value=False)
@patch("boto3.client")
def test_s3_bucket_region_bucket_not_exists(bclient_mock, bucket_exists_mock):
    """
    test the S3.bucket_region property when the bucket does not exist
    """
    ctx = context.Context(curdir / "fixtures/config1.yaml", None)
    sthree = s3.S3(ctx)

    with pytest.raises(BucketDoesNotExistException):
        # bucket_region is a property (see the test above), so plain
        # attribute access - not a call - is what must raise here. The
        # previous `sthree.bucket_region()` only passed because the getter
        # raised before the bogus call could execute.
        sthree.bucket_region
def test_context_with_docs_config_samples():
    """
    Create a Context object with the sample config files used for documentation

    Each sample config may ship an optional "<name>.yaml.mapping" file next to
    it with template substitutions; it is passed to Context when present.
    Creating the Context must not raise for any documented sample.
    """
    config_samples_dir = curdir.parents[1] / "docs" / "config-samples"
    # Path.glob() already yields full paths, so no extra path joining is
    # required (the previous os.path.join(config_samples_dir, f) only worked
    # because join discards its first component for an absolute second one).
    # sorted() keeps the iteration order deterministic across filesystems.
    for config_file in sorted(config_samples_dir.glob("*.yaml")):
        mapping_file = config_file.parent / (config_file.name + ".mapping")
        mapping = mapping_file if mapping_file.exists() else None
        context.Context(config_file, mapping)
39 |
40 | {%- if show_copyright %} 41 | 52 | {%- endif %} 53 | 54 | {# mod: removed "Made with" #} 55 | 56 | {%- if last_updated -%} 57 |
58 | {% trans last_updated=last_updated|e -%} 59 | Last updated on {{ last_updated }} 60 | {%- endtrans -%} 61 |
62 | {%- endif %} 63 | 64 | {%- if show_source and has_source and sourcename %} 65 |
66 | Show source 68 |
69 | {%- endif %} 70 |
71 |
72 | {% if has_contributor_listing and display_contributors and pagename and page_source_suffix %} 73 | {% set contributors = get_contributors_for_file(pagename, page_source_suffix) %} 74 | {% if contributors %} 75 | {% if contributors | length > 1 %} 76 | Thanks to the {{ contributors |length }} contributors! 77 | {% else %} 78 | Thanks to our contributor! 79 | {% endif %} 80 |
81 |
    82 | {% for contributor in contributors %} 83 |
  • 84 | {{ contributor[0] }} 85 |
  • 86 | {% endfor %} 87 |
88 | {% endif %} 89 | {% endif %} 90 |
91 | 92 |
93 | -------------------------------------------------------------------------------- /_static/404.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /_static/header.css: -------------------------------------------------------------------------------- 1 | .p-navigation { 2 | border-bottom: 1px solid var(--color-sidebar-background-border); 3 | } 4 | 5 | .p-navigation__nav { 6 | background: #333333; 7 | display: flex; 8 | } 9 | 10 | .p-logo { 11 | display: flex !important; 12 | padding-top: 0 !important; 13 | text-decoration: none; 14 | } 15 | 16 | .p-logo-image { 17 | height: 44px; 18 | padding-right: 10px; 19 | } 20 | 21 | .p-logo-text { 22 | margin-top: 18px; 23 | color: white; 24 | text-decoration: none; 25 | } 26 | 27 | ul.p-navigation__links { 28 | display: flex; 29 | list-style: none; 30 | margin-left: 0; 31 | margin-top: auto; 32 | margin-bottom: auto; 33 | max-width: 800px; 34 | width: 100%; 35 | } 36 | 37 | ul.p-navigation__links li { 38 | margin: 0 auto; 39 | text-align: center; 40 | width: 100%; 41 | } 42 | 43 | ul.p-navigation__links li a { 44 | background-color: rgba(0, 0, 0, 0); 45 | border: none; 46 | border-radius: 0; 47 | color: var(--color-sidebar-link-text); 48 | display: block; 49 | font-weight: 400; 50 | line-height: 1.5rem; 51 | margin: 0; 52 | overflow: hidden; 53 | padding: 1rem 0; 54 | position: relative; 55 | text-align: left; 56 | text-overflow: ellipsis; 57 | transition-duration: .1s; 58 | transition-property: background-color, color, opacity; 59 | transition-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1); 60 | white-space: nowrap; 61 | width: 100%; 62 | } 63 | 64 | ul.p-navigation__links .p-navigation__link { 65 | color: #ffffff; 66 | font-weight: 300; 67 | text-align: center; 68 | text-decoration: none; 69 | } 70 | 71 | ul.p-navigation__links .p-navigation__link:hover { 72 
| background-color: #2b2b2b; 73 | } 74 | 75 | ul.p-navigation__links .p-dropdown__link:hover { 76 | background-color: var(--color-sidebar-item-background--hover); 77 | } 78 | 79 | ul.p-navigation__links .p-navigation__sub-link { 80 | background: var(--color-background-primary); 81 | padding: .5rem 0 .5rem .5rem; 82 | font-weight: 300; 83 | } 84 | 85 | ul.p-navigation__links .more-links-dropdown li a { 86 | border-left: 1px solid var(--color-sidebar-background-border); 87 | border-right: 1px solid var(--color-sidebar-background-border); 88 | } 89 | 90 | ul.p-navigation__links .more-links-dropdown li:first-child a { 91 | border-top: 1px solid var(--color-sidebar-background-border); 92 | } 93 | 94 | ul.p-navigation__links .more-links-dropdown li:last-child a { 95 | border-bottom: 1px solid var(--color-sidebar-background-border); 96 | } 97 | 98 | ul.p-navigation__links .p-navigation__logo { 99 | padding: 0.5rem; 100 | } 101 | 102 | ul.p-navigation__links .p-navigation__logo img { 103 | width: 40px; 104 | } 105 | 106 | ul.more-links-dropdown { 107 | display: none; 108 | overflow-x: visible; 109 | height: 0; 110 | z-index: 55; 111 | padding: 0; 112 | position: relative; 113 | list-style: none; 114 | margin-bottom: 0; 115 | margin-top: 0; 116 | } 117 | 118 | .nav-more-links::after { 119 | background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='16' height='16'%3E%3Cpath fill='%23111' d='M8.187 11.748l6.187-6.187-1.06-1.061-5.127 5.127L3.061 4.5 2 5.561z'/%3E%3C/svg%3E"); 120 | background-position: center; 121 | background-repeat: no-repeat; 122 | background-size: contain; 123 | content: ""; 124 | display: block; 125 | filter: invert(100%); 126 | height: 1rem; 127 | pointer-events: none; 128 | position: absolute; 129 | right: 1rem; 130 | text-indent: calc(100% + 10rem); 131 | top: calc(1rem + 0.25rem); 132 | width: 1rem; 133 | } 134 | 135 | .nav-ubuntu-com { 136 | display: none; 137 | } 138 | 139 | @media only screen and (min-width: 480px) 
{ 140 | ul.p-navigation__links li { 141 | width: 100%; 142 | } 143 | 144 | .nav-ubuntu-com { 145 | display: inherit; 146 | } 147 | } 148 | 149 | @media only screen and (max-width: 800px) { 150 | .nav-more-links { 151 | margin-left: auto !important; 152 | padding-right: 2rem !important; 153 | width: 8rem !important; 154 | } 155 | } 156 | 157 | @media only screen and (min-width: 800px) { 158 | ul.p-navigation__links li { 159 | width: 100% !important; 160 | } 161 | } 162 | 163 | @media only screen and (min-width: 1310px) { 164 | ul.p-navigation__links { 165 | margin-left: calc(50% - 41em); 166 | } 167 | } 168 | -------------------------------------------------------------------------------- /.sphinx/build_requirements.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.append('./') 4 | from custom_conf import * 5 | 6 | # The file contains helper functions and the mechanism to build the 7 | # .sphinx/requirements.txt file that is needed to set up the virtual 8 | # environment. 9 | 10 | # You should not do any modifications to this file. Put your custom 11 | # requirements into the custom_required_modules array in the custom_conf.py 12 | # file. If you need to change this file, contribute the changes upstream. 
def DeduplicateExtensions(extensionNames):
    """Return the extension names with duplicates removed, keeping order.

    Legacy Canonical extension names are first normalized to their
    namespaced "canonical.<name>" form, so a legacy short name and its
    namespaced form count as the same extension.

    Note: the previous annotation ``extensionNames: [str]`` was a list
    literal, not a valid type expression, and has been dropped.

    :param extensionNames: iterable of extension name strings
    :return: list of normalized, de-duplicated extension names
    """
    # dict.fromkeys() drops exact duplicates while preserving insertion order
    extensionNames = dict.fromkeys(extensionNames)
    resultList = []
    # a set gives O(1) membership checks for the canonical-name bookkeeping
    encounteredCanonicalExtensions = set()

    for extensionName in extensionNames:
        if extensionName in legacyCanonicalSphinxExtensionNames:
            # normalize legacy short names to the namespaced form
            extensionName = "canonical." + extensionName

        if extensionName.startswith("canonical."):
            if extensionName not in encounteredCanonicalExtensions:
                encounteredCanonicalExtensions.add(extensionName)
                resultList.append(extensionName)
        else:
            resultList.append(extensionName)

    return resultList
class SNSNotification(object):
    """
    A data object that contains validation logic and
    structuring rules for SNS notification JSON
    """

    def __init__(self, context: Context, image_name: str):
        """
        Construct a message and verify that it is valid

        :param context: the current configuration context
        :type context: Context
        :param image_name: the image name (a key under "images" in the config)
        :type image_name: str
        """
        self._ctx: Context = context
        self._image_name: str = image_name
        self._s3: S3 = S3(context)

    @property
    def conf(self) -> List[Dict[str, Any]]:
        """
        The sns configuration for the current image (based on "image_name") from context
        """
        return self._ctx.conf["images"][self._image_name]["sns"]

    def _sns_regions(self, topic_config: Dict[Any, Any]) -> List[str]:
        """
        Get the sns regions. Either configured in the sns configuration
        or all available regions.
        If a region is listed that is not available in the currently used partition,
        that region will be ignored (eg. having us-east-1 configured but running in the aws-cn
        partition doesn't include us-east-1 here).

        :param topic_config: the configuration of a single topic
        :type topic_config: Dict[Any, Any]
        :return: the region names to publish the notification to
        :rtype: List[str]
        """

        # dict.get() keeps the "no regions configured" default in one place
        regions_configured = topic_config.get("regions", [])
        sns_regions = _get_regions(self._s3.bucket_region, regions_configured)

        return sns_regions

    def _get_topic_arn(self, topic_name: str, region_name: str) -> str:
        """
        Calculate topic ARN based on partition, region, account and topic name
        :param topic_name: Name of topic
        :type topic_name: str
        :param region_name: name of region
        :type region_name: str
        :return: return topic ARN
        :rtype: str
        """

        stsclient: STSClient = boto3.client("sts", region_name=region_name)
        resp = stsclient.get_caller_identity()

        account = resp["Account"]
        # resp["Arn"] has string format "arn:partition:iam::accountnumber:user/iam_role"
        partition = resp["Arn"].rsplit(":")[1]

        return f"arn:{partition}:sns:{region_name}:{account}:{topic_name}"

    def publish(self) -> None:
        """
        send notification to subscribers

        :raises AWSAuthorizationException: if the used profile is not allowed
            to publish to the topic
        :raises AWSNotificationException: for any other SNS client error
        """

        for topic in self.conf:
            for topic_name, topic_config in topic.items():
                for region_name in self._sns_regions(topic_config):
                    snsclient: SNSClient = boto3.client("sns", region_name=region_name)
                    try:
                        snsclient.publish(
                            TopicArn=self._get_topic_arn(topic_name, region_name),
                            Subject=topic_config["subject"],
                            Message=json.dumps(topic_config["message"]),
                            MessageStructure="json",
                        )
                    except ClientError as e:
                        exception_code: str = e.response["Error"]["Code"]
                        if exception_code == "AuthorizationError":
                            # chain the original ClientError so the real AWS
                            # error is preserved for debugging
                            raise AWSAuthorizationException(
                                "Profile does not have a permission to send the SNS notification."
                                " Please review the policy."
                            ) from e
                        else:
                            raise AWSNotificationException(str(e)) from e
                    logger.info(
                        f"The SNS notification {topic_config['subject']}"
                        f" for the topic {topic_name} in {region_name} has been sent."
                    )
class Context:
    """
    Context holds the used configuration and some
    automatically calculated values
    """

    def __init__(self, conf_path: pathlib.Path, conf_template_mapping_path: pathlib.Path):
        """
        Load, substitute, validate and post-process the configuration.

        :param conf_path: path to the yaml configuration file
        :type conf_path: pathlib.Path
        :param conf_template_mapping_path: path to a yaml file with template
            substitutions for the configuration, or None if no substitution
            should happen (NOTE(review): the annotation says pathlib.Path but
            the code below explicitly handles a falsy value)
        :type conf_template_mapping_path: pathlib.Path
        """
        self._conf_path = conf_path
        self._conf = None
        self._conf_template_mapping_path = conf_template_mapping_path
        self._conf_template_mapping = {}
        yaml = YAML(typ="safe")

        # read the config mapping first
        if self._conf_template_mapping_path:
            with open(self._conf_template_mapping_path, "r") as ctm:
                self._conf_template_mapping = yaml.load(ctm)
                logger.debug(f"loaded config template mapping for substitution: {self._conf_template_mapping}")

        # read the config itself
        with open(self._conf_path, "r") as f:
            template = Template(f.read())
            # substitute the values in the config with values from the config template mapping
            # (Template.substitute() raises KeyError for placeholders missing from the mapping)
            ft = template.substitute(**self._conf_template_mapping)
            y = yaml.load(ft)["awspub"]
            # validate against the pydantic model and keep the resulting plain dict
            self._conf = ConfigModel(**y).model_dump()
            logger.debug(f"config loaded and validated as: {self._conf}")

        # handle relative paths in config files. those are relative to the config file dirname
        if not self.conf["source"]["path"].is_absolute():
            self.conf["source"]["path"] = pathlib.Path(self._conf_path).parent / self.conf["source"]["path"]

        # uefi_data paths (when set) are made absolute the same way
        for image_name, props in self.conf["images"].items():
            if props["uefi_data"] and not self.conf["images"][image_name]["uefi_data"].is_absolute():
                self.conf["images"][image_name]["uefi_data"] = (
                    pathlib.Path(self._conf_path).parent / self.conf["images"][image_name]["uefi_data"]
                )

        # calculate the sha256 sum of the source file once
        self._source_sha256_obj = self._sha256sum(self.conf["source"]["path"])
        self._source_sha256 = self._source_sha256_obj.hexdigest()

    @property
    def conf(self):
        """
        The loaded, substituted and validated configuration as a plain dict
        """
        return self._conf

    @property
    def source_sha256(self):
        """
        The sha256 sum hexdigest of the source->path value from the given
        configuration. This value is used in different places (eg. to automatically
        upload to S3 with this value as key)
        """
        return self._source_sha256

    @property
    def tags_dict(self) -> Dict[str, str]:
        """
        Common tags which will be used for all AWS resources
        This includes tags defined in the configuration file
        but doesn't include image group specific tags.
        Usually the tags() method should be used.
        """
        tags = dict()
        tags["awspub:source:filename"] = self.conf["source"]["path"].name
        tags["awspub:source:architecture"] = self.conf["source"]["architecture"]
        tags["awspub:source:sha256"] = self.source_sha256
        # tags from the config file are applied last and can override the defaults above
        tags.update(self.conf.get("tags", {}))
        return tags

    @property
    def tags(self):
        """
        Helper to make tags directly usable by the AWS EC2 API
        which requires a list of dicts with "Key" and "Value" defined.
        """
        tags = []
        for name, value in self.tags_dict.items():
            tags.append({"Key": name, "Value": value})
        return tags

    def _sha256sum(self, file_path: pathlib.Path):
        """
        Calculate a sha256 sum for a given file

        :param file_path: the path to the local file to upload
        :type file_path: pathlib.Path
        :return: a hashlib Hash object
        :rtype: _hashlib.HASH
        """
        sha256_hash = hashlib.sha256()
        # read in 4 KiB blocks to keep memory usage constant for large files
        with open(file_path.resolve(), "rb") as f:
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_hash.update(byte_block)
        return sha256_hash
def test_snapshot__get_import_snapshot_task_completed():
    """
    Test the Snapshot._get_import_snapshot_task() method

    A matching import snapshot task whose SnapshotTaskDetail status is
    "completed" must be ignored, so the method returns None here.
    """
    ctx = context.Context(curdir / "fixtures/config1.yaml", None)
    s = snapshot.Snapshot(ctx)
    client_mock = MagicMock()
    client_mock.describe_import_snapshot_tasks = MagicMock(
        return_value={
            "ImportSnapshotTasks": [
                {
                    "ImportTaskId": "import-snap-08b79d7b5d382d56b",
                    "SnapshotTaskDetail": {
                        "SnapshotId": "snap-0e0f3407a1b541c40",
                        "Status": "completed",
                    },
                    "Tags": [
                        {"Key": "Name", "Value": "021abb3f2338b5e57b5d870816565429659bc70769d71c486234ad60fe6aec67"},
                    ],
                }
            ],
        }
    )
    assert (
        s._get_import_snapshot_task(client_mock, "021abb3f2338b5e57b5d870816565429659bc70769d71c486234ad60fe6aec67")
        is None
    )


def test_snapshot__get_import_snapshot_task_active():
    """
    Test the Snapshot._get_import_snapshot_task() method

    A matching import snapshot task whose SnapshotTaskDetail status is
    "active" must be returned via its ImportTaskId.
    """
    ctx = context.Context(curdir / "fixtures/config1.yaml", None)
    s = snapshot.Snapshot(ctx)
    client_mock = MagicMock()
    client_mock.describe_import_snapshot_tasks = MagicMock(
        return_value={
            "ImportSnapshotTasks": [
                {
                    "ImportTaskId": "import-snap-08b79d7b5d382d56b",
                    "SnapshotTaskDetail": {
                        "SnapshotId": "snap-0e0f3407a1b541c40",
                        "Status": "active",
                    },
                    "Tags": [
                        {"Key": "Name", "Value": "021abb3f2338b5e57b5d870816565429659bc70769d71c486234ad60fe6aec67"},
                    ],
                }
            ],
        }
    )
    assert (
        s._get_import_snapshot_task(client_mock, "021abb3f2338b5e57b5d870816565429659bc70769d71c486234ad60fe6aec67")
        == "import-snap-08b79d7b5d382d56b"
    )
var(--color-background-primary); 22 | --color-admonition-title-background--important: var(--color-background-primary); 23 | --color-admonition-title-background--caution: var(--color-background-primary); 24 | --color-admonition-title--note: #24598F; 25 | --color-admonition-title--tip: #24598F; 26 | --color-admonition-title--important: #C7162B; 27 | --color-admonition-title--caution: #F99B11; 28 | --color-highlighted-background: #EBEBEB; 29 | --color-link-underline: var(--color-link); 30 | --color-link-underline--hover: var(--color-link); 31 | --color-link-underline--visited: var(--color-link--visited); 32 | --color-link-underline--visited--hover: var(--color-link--visited); 33 | --color-version-popup: #772953; 34 | } 35 | 36 | @media not print { 37 | body[data-theme="dark"] { 38 | --color-code-background: #202020; 39 | --color-code-foreground: #d0d0d0; 40 | --color-foreground-secondary: var(--color-foreground-primary); 41 | --color-foreground-muted: #CDCDCD; 42 | --color-background-secondary: var(--color-background-primary); 43 | --color-background-hover: #666; 44 | --color-brand-primary: #fff; 45 | --color-brand-content: #69C; 46 | --color-sidebar-link-text: #f7f7f7; 47 | --color-sidebar-item-background--current: #666; 48 | --color-sidebar-item-background--hover: #333; 49 | --color-admonition-background: transparent; 50 | --color-admonition-title-background--note: var(--color-background-primary); 51 | --color-admonition-title-background--tip: var(--color-background-primary); 52 | --color-admonition-title-background--important: var(--color-background-primary); 53 | --color-admonition-title-background--caution: var(--color-background-primary); 54 | --color-admonition-title--note: #24598F; 55 | --color-admonition-title--tip: #24598F; 56 | --color-admonition-title--important: #C7162B; 57 | --color-admonition-title--caution: #F99B11; 58 | --color-highlighted-background: #666; 59 | --color-version-popup: #F29879; 60 | } 61 | @media (prefers-color-scheme: dark) { 62 | 
body:not([data-theme="light"]) { 63 | --color-api-background: #A4A4A4; 64 | --color-code-background: #202020; 65 | --color-code-foreground: #d0d0d0; 66 | --color-foreground-secondary: var(--color-foreground-primary); 67 | --color-foreground-muted: #CDCDCD; 68 | --color-background-secondary: var(--color-background-primary); 69 | --color-background-hover: #666; 70 | --color-brand-primary: #fff; 71 | --color-brand-content: #69C; 72 | --color-sidebar-link-text: #f7f7f7; 73 | --color-sidebar-item-background--current: #666; 74 | --color-sidebar-item-background--hover: #333; 75 | --color-admonition-background: transparent; 76 | --color-admonition-title-background--note: var(--color-background-primary); 77 | --color-admonition-title-background--tip: var(--color-background-primary); 78 | --color-admonition-title-background--important: var(--color-background-primary); 79 | --color-admonition-title-background--caution: var(--color-background-primary); 80 | --color-admonition-title--note: #24598F; 81 | --color-admonition-title--tip: #24598F; 82 | --color-admonition-title--important: #C7162B; 83 | --color-admonition-title--caution: #F99B11; 84 | --color-highlighted-background: #666; 85 | --color-link: #F9FCFF; 86 | --color-version-popup: #F29879; 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /Makefile.sp: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | # `Makefile.sp` is from the Sphinx starter pack and should not be 4 | # modified. 5 | # Add your customisation to `Makefile` instead. 6 | 7 | # You can set these variables from the command line, and also 8 | # from the environment for the first two. 9 | SPHINXDIR = .sphinx 10 | SPHINXOPTS ?= -c . -d $(SPHINXDIR)/.doctrees -j auto 11 | SPHINXBUILD ?= sphinx-build 12 | SOURCEDIR = . 
13 | BUILDDIR = _build 14 | VENVDIR = $(SPHINXDIR)/venv 15 | PA11Y = $(SPHINXDIR)/node_modules/pa11y/bin/pa11y.js --config $(SPHINXDIR)/pa11y.json 16 | VENV = $(VENVDIR)/bin/activate 17 | TARGET = * 18 | ALLFILES = *.rst **/*.rst 19 | ADDPREREQS ?= 20 | 21 | .PHONY: sp-full-help sp-woke-install sp-pa11y-install sp-install sp-run sp-html \ 22 | sp-epub sp-serve sp-clean sp-clean-doc sp-spelling sp-spellcheck sp-linkcheck sp-woke \ 23 | sp-pa11y Makefile.sp sp-vale 24 | 25 | sp-full-help: $(VENVDIR) 26 | @. $(VENV); $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 27 | @echo "\n\033[1;31mNOTE: This help texts shows unsupported targets!\033[0m" 28 | @echo "Run 'make help' to see supported targets." 29 | 30 | # Shouldn't assume that venv is available on Ubuntu by default; discussion here: 31 | # https://bugs.launchpad.net/ubuntu/+source/python3.4/+bug/1290847 32 | $(SPHINXDIR)/requirements.txt: 33 | @python3 -c "import venv" || \ 34 | (echo "You must install python3-venv before you can build the documentation."; exit 1) 35 | python3 -m venv $(VENVDIR) 36 | @if [ ! -z "$(ADDPREREQS)" ]; then \ 37 | . $(VENV); pip install \ 38 | $(PIPOPTS) --require-virtualenv $(ADDPREREQS); \ 39 | fi 40 | . $(VENV); python3 $(SPHINXDIR)/build_requirements.py 41 | 42 | # If requirements are updated, venv should be rebuilt and timestamped. 43 | $(VENVDIR): $(SPHINXDIR)/requirements.txt 44 | @echo "... setting up virtualenv" 45 | python3 -m venv $(VENVDIR) 46 | . $(VENV); pip install $(PIPOPTS) --require-virtualenv \ 47 | --upgrade -r $(SPHINXDIR)/requirements.txt \ 48 | --log $(VENVDIR)/pip_install.log 49 | @test ! -f $(VENVDIR)/pip_list.txt || \ 50 | mv $(VENVDIR)/pip_list.txt $(VENVDIR)/pip_list.txt.bak 51 | @. $(VENV); pip list --local --format=freeze > $(VENVDIR)/pip_list.txt 52 | @touch $(VENVDIR) 53 | 54 | sp-woke-install: 55 | @type woke >/dev/null 2>&1 || \ 56 | { echo "Installing \"woke\" snap... 
\n"; sudo snap install woke; } 57 | 58 | sp-pa11y-install: 59 | @type $(PA11Y) >/dev/null 2>&1 || { \ 60 | echo "Installing \"pa11y\" from npm... \n"; \ 61 | mkdir -p $(SPHINXDIR)/node_modules/ ; \ 62 | npm install --prefix $(SPHINXDIR) pa11y; \ 63 | } 64 | 65 | sp-install: $(VENVDIR) 66 | 67 | sp-run: sp-install 68 | . $(VENV); sphinx-autobuild -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 69 | 70 | # Doesn't depend on $(BUILDDIR) to rebuild properly at every run. 71 | sp-html: sp-install 72 | . $(VENV); $(SPHINXBUILD) -W --keep-going -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS) 73 | 74 | sp-epub: sp-install 75 | . $(VENV); $(SPHINXBUILD) -b epub "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS) 76 | 77 | sp-serve: sp-html 78 | cd "$(BUILDDIR)"; python3 -m http.server --bind 127.0.0.1 8000 79 | 80 | sp-clean: sp-clean-doc 81 | @test ! -e "$(VENVDIR)" -o -d "$(VENVDIR)" -a "$(abspath $(VENVDIR))" != "$(VENVDIR)" 82 | rm -rf $(VENVDIR) 83 | rm -f $(SPHINXDIR)/requirements.txt 84 | rm -rf $(SPHINXDIR)/node_modules/ 85 | rm -rf $(SPHINXDIR)/styles 86 | rm -rf $(SPHINXDIR)/vale.ini 87 | 88 | sp-clean-doc: 89 | git clean -fx "$(BUILDDIR)" 90 | rm -rf $(SPHINXDIR)/.doctrees 91 | 92 | sp-spellcheck: 93 | . $(VENV) ; python3 -m pyspelling -c $(SPHINXDIR)/spellingcheck.yaml -j $(shell nproc) 94 | 95 | sp-spelling: sp-html sp-spellcheck 96 | 97 | sp-linkcheck: sp-install 98 | . $(VENV) ; $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) || { grep --color -F "[broken]" "$(BUILDDIR)/output.txt"; exit 1; } 99 | exit 0 100 | 101 | sp-woke: sp-woke-install 102 | woke $(ALLFILES) --exit-1-on-failure \ 103 | -c https://github.com/canonical/Inclusive-naming/raw/main/config.yml 104 | 105 | sp-pa11y: sp-pa11y-install sp-html 106 | find $(BUILDDIR) -name *.html -print0 | xargs -n 1 -0 $(PA11Y) 107 | 108 | sp-vale: sp-install 109 | @. 
#!/usr/bin/python3

import argparse
import json
import logging
import pathlib
import sys

import awspub

logger = logging.getLogger(__name__)


def _create(args) -> None:
    """
    Create images based on the given configuration and write json
    data to the given output
    """
    by_name, by_group = awspub.create(args.config, args.config_mapping, args.group)
    args.output.write(json.dumps({"images": by_name, "images-by-group": by_group}, indent=4))


def _list(args) -> None:
    """
    List images based on the given configuration and write json
    data to the given output
    """
    by_name, by_group = awspub.list(args.config, args.config_mapping, args.group)
    args.output.write(json.dumps({"images": by_name, "images-by-group": by_group}, indent=4))
def _cleanup(args) -> None:
    """
    Cleanup available images

    NOTE(review): the "cleanup" sub-command accepts --output for symmetry with
    "create"/"list" but this handler never writes to it.
    """
    awspub.cleanup(args.config, args.config_mapping, args.group)


def _publish(args) -> None:
    """
    Make available images public

    NOTE(review): the "publish" sub-command accepts --output for symmetry with
    "create"/"list" but this handler never writes to it.
    """
    awspub.publish(args.config, args.config_mapping, args.group)


def _add_common_arguments(subparser) -> None:
    """
    Register the options and the positional argument shared by every
    sub-command (previously copy-pasted four times).
    """
    subparser.add_argument(
        "--output", type=argparse.FileType("w+"), help="output file path. defaults to stdout", default=sys.stdout
    )
    subparser.add_argument("--config-mapping", type=pathlib.Path, help="the image config template mapping file path")
    subparser.add_argument("--group", type=str, help="only handles images from given group")
    subparser.add_argument("config", type=pathlib.Path, help="the image configuration file path")


def _parser():
    """
    Build the command line parser.

    All four sub-commands share the exact same argument set; they only differ
    in the handler function stored as the "func" default.

    :return: the configured argument parser
    :rtype: argparse.ArgumentParser
    """
    parser = argparse.ArgumentParser(description="AWS EC2 publication tool")
    parser.add_argument("--log-level", choices=["info", "debug"], default="info")
    parser.add_argument("--log-file", type=pathlib.Path, help="write log to given file instead of stdout")
    parser.add_argument("--log-console", action="store_true", help="write log to stdout")
    p_sub = parser.add_subparsers(help="sub-command help")

    # (name, help text, handler) for each sub-command, registered in the
    # same order as before so the --help output is unchanged
    subcommands = (
        ("create", "Create images", _create),
        ("list", "List images (but don't modify anything)", _list),
        ("cleanup", "Cleanup images", _cleanup),
        ("publish", "Publish images", _publish),
    )
    for name, help_text, handler in subcommands:
        subparser = p_sub.add_parser(name, help=help_text)
        _add_common_arguments(subparser)
        subparser.set_defaults(func=handler)

    return parser


def main():
    """
    CLI entry point: parse arguments, configure root logging and dispatch
    to the selected sub-command handler.
    """
    parser = _parser()
    args = parser.parse_args()
    log_formatter = logging.Formatter("%(asctime)s:%(name)s:%(levelname)s:%(message)s")
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG if args.log_level == "debug" else logging.INFO)
    # log file
    if args.log_file:
        file_handler = logging.FileHandler(filename=args.log_file)
        file_handler.setFormatter(log_formatter)
        root_logger.addHandler(file_handler)
    # log console
    if args.log_console:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(log_formatter)
        root_logger.addHandler(console_handler)
    if "func" not in args:
        # no sub-command given: show the help text and exit
        # (print_help() returns None, so the process exits with status 0)
        sys.exit(parser.print_help())
    args.func(args)
    sys.exit(0)


if __name__ == "__main__":
    main()
temporary: true 21 | groups: 22 | - group1 23 | - group2 24 | "test-image-2": 25 | description: | 26 | A test image with a separate snapshot 27 | boot_mode: "uefi" 28 | separate_snapshot: true 29 | groups: 30 | - group1 31 | "test-image-3": 32 | description: | 33 | A test image with a separate snapshot and a billing code 34 | boot_mode: "uefi" 35 | separate_snapshot: true 36 | billing_products: 37 | - billingcode 38 | "test-image-4": 39 | description: | 40 | A test image without a separate snapshot but a billing product 41 | boot_mode: "uefi-preferred" 42 | billing_products: 43 | - billingcode 44 | "test-image-5": 45 | description: | 46 | A test image without a separate snapshot but multiple billing products 47 | boot_mode: "uefi-preferred" 48 | billing_products: 49 | - billingcode1 50 | - billingcode2 51 | "test-image-6": 52 | description: | 53 | A test image without a separate snapshot but multiple billing products 54 | boot_mode: "uefi-preferred" 55 | regions: 56 | - "eu-central-1" 57 | public: true 58 | tags: 59 | key1: value1 60 | "test-image-7": 61 | description: | 62 | A test image without a separate snapshot but multiple billing products 63 | boot_mode: "uefi-preferred" 64 | regions: 65 | - "eu-central-1" 66 | public: true 67 | temporary: true 68 | tags: 69 | key2: name 70 | name: "not-foobar" 71 | "test-image-8": 72 | description: | 73 | A test image without a separate snapshot but multiple billing products 74 | boot_mode: "uefi-preferred" 75 | regions: 76 | - "eu-central-1" 77 | - "us-east-1" 78 | public: true 79 | tags: 80 | key1: value1 81 | share: 82 | - "123456789123" 83 | - "221020170000" 84 | - "aws:290620200000" 85 | - "aws-cn:334455667788" 86 | - "arn:aws:organizations::123456789012:organization/o-123example" 87 | - "arn:aws-cn:organizations::334455667788:ou/o-123example/ou-1234-5example" 88 | marketplace: 89 | entity_id: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" 90 | access_role_arn: "arn:aws:iam::xxxxxxxxxxxx:role/AWSMarketplaceAccess" 91 | 
version_title: "1.0.0" 92 | release_notes: "N/A" 93 | user_name: "ubuntu" 94 | scanning_port: 22 95 | os_name: "UBUNTU" 96 | os_version: "22.04" 97 | usage_instructions: | 98 | You can use me 99 | recommended_instance_type: "m5.large" 100 | security_groups: 101 | - 102 | from_port: 22 103 | ip_protocol: "tcp" 104 | ip_ranges: 105 | - "0.0.0.0/0" 106 | to_port: 22 107 | ssm_parameter: 108 | - 109 | name: /test/image 110 | - 111 | name: /test/another-image 112 | "test-image-9": 113 | boot_mode: "uefi" 114 | description: | 115 | A test image without a separate snapshot but multiple billing products 116 | regions: 117 | - "eu-central-1" 118 | - "us-east-1" 119 | ssm_parameter: 120 | - 121 | name: /awspub-test/param2 122 | allow_overwrite: true 123 | "test-image-10": 124 | boot_mode: "uefi" 125 | description: | 126 | A test image without a separate snapshot but single sns configs 127 | regions: 128 | - "us-east-1" 129 | sns: 130 | - "topic1": 131 | subject: "topic1-subject" 132 | message: 133 | default: "default-message" 134 | email: "email-message" 135 | regions: 136 | - "us-east-1" 137 | "test-image-11": 138 | boot_mode: "uefi" 139 | description: | 140 | A test image without a separate snapshot but multiple sns configs 141 | regions: 142 | - "us-east-1" 143 | - "eu-central-1" 144 | sns: 145 | - "topic1": 146 | subject: "topic1-subject" 147 | message: 148 | default: "default-message" 149 | email: "email-message" 150 | regions: 151 | - "us-east-1" 152 | - "topic2": 153 | subject: "topic2-subject" 154 | message: 155 | default: "default-message" 156 | regions: 157 | - "us-gov-1" 158 | - "eu-central-1" 159 | "test-image-12": 160 | boot_mode: "uefi" 161 | description: | 162 | A test image without a separate snapshot but single sns configs 163 | regions: 164 | - "us-east-1" 165 | sns: 166 | - "topic1": 167 | subject: "topic1-subject" 168 | message: 169 | default: "default-message" 170 | email: "email-message" 171 | 172 | tags: 173 | name: "foobar" 174 | 
import logging
import re
from typing import Any, Dict

import boto3
from mypy_boto3_marketplace_catalog import MarketplaceCatalogClient

from awspub.context import Context

logger = logging.getLogger(__name__)


class ImageMarketplace:
    """
    Handle AWS Marketplace API interaction
    """

    def __init__(self, context: Context, image_name: str):
        self._ctx: Context = context
        self._image_name: str = image_name
        # the marketplace-catalog API endpoint only exists in us-east-1
        self._mpclient: MarketplaceCatalogClient = boto3.client("marketplace-catalog", region_name="us-east-1")

    @property
    def conf(self) -> Dict[str, Any]:
        """
        The marketplace configuration for the current image (based on "image_name") from context
        """
        return self._ctx.conf["images"][self._image_name]["marketplace"]

    def request_new_version(self, image_id: str) -> None:
        """
        Request a new Marketplace version for the given image Id

        :param image_id: an image Id (in the format 'ami-123')
        :type image_id: str
        """
        entity = self._mpclient.describe_entity(Catalog="AWSMarketplace", EntityId=self.conf["entity_id"])
        # skip the request when a version with the same title was already published
        known_titles = {version["VersionTitle"] for version in entity["DetailsDocument"].get("Versions", [])}
        if self.conf["version_title"] in known_titles:
            logger.info(f"Marketplace version '{self.conf['version_title']}' already exists. Do nothing")
            return

        # version doesn't exist already - request a new one
        changeset = self._request_new_version_changeset(image_id)
        changeset_name = ImageMarketplace.sanitize_changeset_name(
            f"New version request for {self.conf['version_title']}"
        )
        resp = self._mpclient.start_change_set(
            Catalog="AWSMarketplace", ChangeSet=changeset, ChangeSetTags=self._ctx.tags, ChangeSetName=changeset_name
        )
        logger.info(
            f"new version '{self.conf['version_title']}' (image: {image_id}) for entity "
            f"{self.conf['entity_id']} requested (changeset-id: {resp['ChangeSetId']})"
        )

    def _request_new_version_changeset(self, image_id: str):
        """
        Create a changeset structure for a new AmiProduct version
        See https://docs.aws.amazon.com/marketplace-catalog/latest/api-reference/ami-products.html#ami-add-version

        :param image_id: an image Id (in the format 'ami-123')
        :type image_id: str
        :return: A changeset structure to request a new version
        :rtype: List[Dict[str, Any]]
        """
        cfg = self.conf
        # the security group entries in the marketplace format
        security_groups = [
            {
                "IpProtocol": group["ip_protocol"],
                "IpRanges": [ipr for ipr in group["ip_ranges"]],
                "FromPort": group["from_port"],
                "ToPort": group["to_port"],
            }
            for group in cfg["security_groups"]
        ]
        delivery_details = {
            "AmiDeliveryOptionDetails": {
                "AmiSource": {
                    "AmiId": image_id,
                    "AccessRoleArn": cfg["access_role_arn"],
                    "UserName": cfg["user_name"],
                    "OperatingSystemName": cfg["os_name"],
                    "OperatingSystemVersion": cfg["os_version"],
                },
                "UsageInstructions": cfg["usage_instructions"],
                "RecommendedInstanceType": cfg["recommended_instance_type"],
                "SecurityGroups": security_groups,
            }
        }
        return [
            {
                "ChangeType": "AddDeliveryOptions",
                "Entity": {
                    "Identifier": cfg["entity_id"],
                    "Type": "AmiProduct@1.0",
                },
                "DetailsDocument": {
                    "Version": {
                        "VersionTitle": cfg["version_title"],
                        "ReleaseNotes": cfg["release_notes"],
                    },
                    "DeliveryOptions": [{"Details": delivery_details}],
                },
            }
        ]

    @staticmethod
    def sanitize_changeset_name(name: str) -> str:
        # changeset names can only include alphanumeric characters, whitespace, and any combination of the following
        # characters: _+=.:@- This regex pattern takes the list of allowed characters, does a negative match on the
        # string and removes all matched (i.e. disallowed) characters. See [0] for reference.
        # [0] https://docs.aws.amazon.com/marketplace-catalog/latest/api-reference/API_StartChangeSet.html#API_StartChangeSet_RequestSyntax # noqa
        return re.sub(r"[^\w\s+=.:@-]", "", name)
def _images_grouped(
    images: List[Tuple[str, Image, Dict[str, _ImageInfo]]], group: Optional[str]
) -> Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, Dict[str, str]]]]:
    """
    Group the given images by name and by group

    :param images: the images
    :type images: List[Tuple[str, Image, Dict[str, _ImageInfo]]]
    :param group: a optional group name
    :type group: Optional[str]
    :return: the images grouped by name and by group
    :rtype: Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, Dict[str, str]]]
    """
    images_by_name: Dict[str, Dict[str, str]] = {}
    images_by_group: Dict[str, Dict[str, Dict[str, str]]] = {}
    for image_name, image, image_result in images:
        # map each region to the image id created there
        region_to_id: Dict[str, str] = {region: info.image_id for region, info in image_result.items()}
        images_by_name[image_name] = region_to_id
        for image_group in image.conf.get("groups", []):
            if group and image_group != group:
                continue
            images_by_group.setdefault(image_group, {})[image_name] = region_to_id
    return images_by_name, images_by_group


def _images_filtered(context: Context, group: Optional[str]) -> Iterator[Tuple[str, Image]]:
    """
    Filter the images from ctx based on the given args

    :param context: the context
    :type context: a awspub.context.Context instance
    :param group: a optional group name
    :type group: Optional[str]
    """
    for image_name in context.conf["images"].keys():
        image = Image(context, image_name)
        # limit the images to process to the group given on the command line
        if group and group not in image.conf.get("groups", []):
            logger.info(f"skipping image {image_name} because not part of group {group}")
            continue
        logger.info(f"processing image {image_name} (group: {group})")
        yield image_name, image


def create(
    config: pathlib.Path, config_mapping: pathlib.Path, group: Optional[str]
) -> Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, Dict[str, str]]]]:
    """
    Create images in the partition of the used account based on
    the given configuration file and the config mapping

    :param config: the configuration file path
    :type config: pathlib.Path
    :param config_mapping: the config template mapping file path
    :type config_mapping: pathlib.Path
    :param group: only handles images from given group
    :type group: Optional[str]
    :return: the images grouped by name and by group
    :rtype: Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, Dict[str, str]]]
    """
    ctx = Context(config, config_mapping)
    S3(ctx).upload_file(ctx.conf["source"]["path"])
    collected: List[Tuple[str, Image, Dict[str, _ImageInfo]]] = []
    for image_name, image in _images_filtered(ctx, group):
        collected.append((image_name, image, image.create()))
    return _images_grouped(collected, group)


def list(
    config: pathlib.Path, config_mapping: pathlib.Path, group: Optional[str]
) -> Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, Dict[str, str]]]]:
    """
    List images in the partition of the used account based on
    the given configuration file and the config mapping

    :param config: the configuration file path
    :type config: pathlib.Path
    :param config_mapping: the config template mapping file path
    :type config_mapping: pathlib.Path
    :param group: only handles images from given group
    :type group: Optional[str]
    :return: the images grouped by name and by group
    :rtype: Tuple[Dict[str, Dict[str, str]], Dict[str, Dict[str, Dict[str, str]]]
    """
    ctx = Context(config, config_mapping)
    collected: List[Tuple[str, Image, Dict[str, _ImageInfo]]] = []
    for image_name, image in _images_filtered(ctx, group):
        collected.append((image_name, image, image.list()))
    return _images_grouped(collected, group)


def publish(config: pathlib.Path, config_mapping: pathlib.Path, group: Optional[str]):
    """
    Make available images in the partition of the used account based on
    the given configuration file public

    :param config: the configuration file path
    :type config: pathlib.Path
    :param config_mapping: the config template mapping file path
    :type config_mapping: pathlib.Path
    :param group: only handles images from given group
    :type group: Optional[str]
    """
    ctx = Context(config, config_mapping)
    for _image_name, image in _images_filtered(ctx, group):
        image.publish()


def cleanup(config: pathlib.Path, config_mapping: pathlib.Path, group: Optional[str]):
    """
    Cleanup available images in the partition of the used account based on
    the given configuration file

    :param config: the configuration file path
    :type config: pathlib.Path
    :param config_mapping: the config template mapping file path
    :type config_mapping: pathlib.Path
    :param group: only handles images from given group
    :type group: Optional[str]
    """
    ctx = Context(config, config_mapping)
    for _image_name, image in _images_filtered(ctx, group):
        image.cleanup()
instance.list_buckets.return_value = {"Buckets": [{"Name": "bucket1"}]} 31 | 32 | sns.SNSNotification(ctx, imagename).publish() 33 | assert instance.publish.called == called_sns_publish 34 | assert instance.publish.call_count == publish_call_count 35 | 36 | 37 | @pytest.mark.parametrize( 38 | "imagename", 39 | [ 40 | ("test-image-10"), 41 | ("test-image-11"), 42 | ("test-image-12"), 43 | ], 44 | ) 45 | def test_sns_publish_fail_with_invalid_topic(imagename): 46 | """ 47 | Test the send_notification logic 48 | """ 49 | with patch("boto3.client") as bclient_mock: 50 | instance = bclient_mock.return_value 51 | ctx = context.Context(curdir / "fixtures/config1.yaml", None) 52 | instance.describe_regions.return_value = { 53 | "Regions": [{"RegionName": "eu-central-1"}, {"RegionName": "us-east-1"}] 54 | } 55 | instance.list_buckets.return_value = {"Buckets": [{"Name": "bucket1"}]} 56 | 57 | # topic1 is invalid topic 58 | def side_effect(*args, **kwargs): 59 | topic_arn = kwargs.get("TopicArn") 60 | if "topic1" in topic_arn and "us-east-1" in topic_arn: 61 | error_reponse = { 62 | "Error": { 63 | "Code": "NotFoundException", 64 | "Message": "An error occurred (NotFound) when calling the Publish operation: " 65 | "Topic does not exist.", 66 | } 67 | } 68 | raise botocore.exceptions.ClientError(error_reponse, "") 69 | 70 | instance.publish.side_effect = side_effect 71 | 72 | with pytest.raises(exceptions.AWSNotificationException): 73 | sns.SNSNotification(ctx, imagename).publish() 74 | 75 | 76 | @pytest.mark.parametrize( 77 | "imagename", 78 | [ 79 | ("test-image-10"), 80 | ("test-image-11"), 81 | ("test-image-12"), 82 | ], 83 | ) 84 | def test_sns_publish_fail_with_unauthorized_user(imagename): 85 | """ 86 | Test the send_notification logic 87 | """ 88 | with patch("boto3.client") as bclient_mock: 89 | instance = bclient_mock.return_value 90 | ctx = context.Context(curdir / "fixtures/config1.yaml", None) 91 | instance.describe_regions.return_value = { 92 | "Regions": 
                [{"RegionName": "eu-central-1"}, {"RegionName": "us-east-1"}]
        }
        instance.list_buckets.return_value = {"Buckets": [{"Name": "bucket1"}]}

        # NOTE(review): "error_reponse" is a typo for "error_response"
        error_reponse = {
            "Error": {
                "Code": "AuthorizationError",
                "Message": "User are not authorized perform SNS Notification service",
            }
        }
        # every publish call raises, regardless of topic/region
        instance.publish.side_effect = botocore.exceptions.ClientError(error_reponse, "")

        with pytest.raises(exceptions.AWSAuthorizationException):
            sns.SNSNotification(ctx, imagename).publish()


@pytest.mark.parametrize(
    "imagename, partition, regions_in_partition, expected",
    [
        (
            "test-image-10",
            "aws-cn",
            ["cn-north1", "cn-northwest-1"],
            [],
        ),
        (
            "test-image-11",
            "aws",
            ["us-east-1", "eu-central-1"],
            [
                "arn:aws:sns:us-east-1:1234:topic1",
                "arn:aws:sns:eu-central-1:1234:topic2",
            ],
        ),
        (
            "test-image-12",
            "aws",
            ["us-east-1", "eu-central-1"],
            [
                "arn:aws:sns:us-east-1:1234:topic1",
                "arn:aws:sns:eu-central-1:1234:topic1",
            ],
        ),
    ],
)
def test_sns__get_topic_arn(imagename, partition, regions_in_partition, expected):
    """
    Test the send_notification logic
    """
    with patch("boto3.client") as bclient_mock:
        instance = bclient_mock.return_value
        ctx = context.Context(curdir / "fixtures/config1.yaml", None)
        sns_conf = ctx.conf["images"][imagename]["sns"]
        instance.describe_regions.return_value = {"Regions": [{"RegionName": r} for r in regions_in_partition]}
        instance.list_buckets.return_value = {"Buckets": [{"Name": "bucket1"}]}

        # the caller identity ARN carries the partition ("aws" vs "aws-cn")
        instance.get_caller_identity.return_value = {"Account": "1234", "Arn": f"arn:{partition}:iam::1234:user/test"}

        topic_arns = []
        for topic in sns_conf:
            for topic_name, topic_conf in topic.items():
                sns_regions = sns.SNSNotification(ctx,
                    imagename)._sns_regions(topic_conf)
                for region in sns_regions:
                    res_arn = sns.SNSNotification(ctx, imagename)._get_topic_arn(topic_name, region)
                    topic_arns.append(res_arn)

        assert topic_arns == expected


@pytest.mark.parametrize(
    "imagename,regions_in_partition,regions_expected",
    [
        ("test-image-10", ["us-east-1", "eu-west-1"], {"topic1": ["us-east-1"]}),
        (
            "test-image-11",
            ["us-east-1", "eu-west-1"],
            {"topic1": ["us-east-1"], "topic2": []},
        ),
        ("test-image-12", ["eu-northwest-1", "ap-southeast-1"], {"topic1": ["eu-northwest-1", "ap-southeast-1"]}),
    ],
)
def test_sns_regions(imagename, regions_in_partition, regions_expected):
    """
    Test the regions for a given image
    """
    with patch("boto3.client") as bclient_mock:
        instance = bclient_mock.return_value
        instance.describe_regions.return_value = {"Regions": [{"RegionName": r} for r in regions_in_partition]}
        ctx = context.Context(curdir / "fixtures/config1.yaml", None)
        sns_conf = ctx.conf["images"][imagename]["sns"]
        instance.list_buckets.return_value = {"Buckets": [{"Name": "bucket1"}]}

        sns_regions = {}
        for topic in sns_conf:
            for topic_name, topic_conf in topic.items():
                sns_regions[topic_name] = sns.SNSNotification(ctx, imagename)._sns_regions(topic_conf)

        assert sns_regions == regions_expected


# --- docs/conf.py (shared Sphinx starter-pack configuration) ---
import sys
import os
import requests
from urllib.parse import urlparse
from git import Repo, InvalidGitRepositoryError
import time

# NOTE(review): the wildcard imports below deliberately pull project settings
# (html_context, slug, custom_* variables) into this module's namespace;
# "requests", "urlparse" and "time" are not used in the visible part of this
# file — confirm they are needed before removing.
sys.path.append('./')
from custom_conf import *
sys.path.append('.sphinx/')
from build_requirements import *

# Configuration file for the Sphinx documentation builder.
# You should not do any modifications to this file. Put your custom
# configuration into the custom_conf.py file.
# If you need to change this file, contribute the changes upstream.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

############################################################
### Extensions
############################################################

extensions = [
    'sphinx_design',
    'sphinx_copybutton',
    'sphinxcontrib.jquery',
]

# Only add redirects extension if any redirects are specified.
if AreRedirectsDefined():
    extensions.append('sphinx_reredirects')

# Only add myst extensions if any configuration is present.
if IsMyStParserUsed():
    extensions.append('myst_parser')

    # Additional MyST syntax
    myst_enable_extensions = [
        'substitution',
        'deflist',
        'linkify'
    ]
    myst_enable_extensions.extend(custom_myst_extensions)

# Only add Open Graph extension if any configuration is present.
if IsOpenGraphConfigured():
    extensions.append('sphinxext.opengraph')

extensions.extend(custom_extensions)
extensions = DeduplicateExtensions(extensions)

### Configuration for extensions

# Used for related links
if not 'discourse_prefix' in html_context and 'discourse' in html_context:
    html_context['discourse_prefix'] = html_context['discourse'] + '/t/'

# The URL prefix for the notfound extension depends on whether the documentation uses versions.
# For documentation on documentation.ubuntu.com, we also must add the slug.
url_version = ''
url_lang = ''

# Determine if the URL uses versions and language
if 'READTHEDOCS_CANONICAL_URL' in os.environ and os.environ['READTHEDOCS_CANONICAL_URL']:
    url_parts = os.environ['READTHEDOCS_CANONICAL_URL'].split('/')

    if len(url_parts) >= 2 and 'READTHEDOCS_VERSION' in os.environ and os.environ['READTHEDOCS_VERSION'] == url_parts[-2]:
        url_version = url_parts[-2] + '/'

    if len(url_parts) >= 3 and 'READTHEDOCS_LANGUAGE' in os.environ and os.environ['READTHEDOCS_LANGUAGE'] == url_parts[-3]:
        url_lang = url_parts[-3] + '/'

# Set notfound_urls_prefix to the slug (if defined) and the version/language affix
if slug:
    notfound_urls_prefix = '/' + slug + '/' + url_lang + url_version
elif len(url_lang + url_version) > 0:
    notfound_urls_prefix = '/' + url_lang + url_version
else:
    notfound_urls_prefix = ''

notfound_context = {
    'title': 'Page not found',
    # NOTE(review): HTML markup (e.g. <h1>/<p> tags) appears to have been
    # stripped from this string literal during extraction — confirm the exact
    # body against the upstream sphinx-docs-starter-pack conf.py before use.
    'body': 'Sorry, but the documentation page that you are looking for was not found.\n\nDocumentation changes over time, and pages are moved around. We try to redirect you to the updated content where possible, but unfortunately, that didn\'t work this time (maybe because the content you were looking for does not exist in this version of the documentation).\nYou can try to use the navigation to locate the content you\'re looking for, or search for a similar page.\n',
}

# Default image for OGP (to prevent font errors, see
# https://github.com/canonical/sphinx-docs-starter-pack/pull/54 )
if not 'ogp_image' in locals():
    ogp_image = 'https://assets.ubuntu.com/v1/253da317-image-document-ubuntudocs.svg'

############################################################
### General configuration
############################################################

exclude_patterns = [
    '_build',
    'Thumbs.db',
    '.DS_Store',
    '.sphinx',
    '.tox'
]
exclude_patterns.extend(custom_excludes)

rst_epilog = '''
.. include:: /reuse/links.txt
'''
if 'custom_rst_epilog' in locals():
    rst_epilog = custom_rst_epilog

source_suffix = {
    '.rst': 'restructuredtext',
    '.md': 'markdown',
}

if not 'conf_py_path' in html_context and 'github_folder' in html_context:
    html_context['conf_py_path'] = html_context['github_folder']

# For ignoring specific links
linkcheck_anchors_ignore_for_url = [
    r'https://github\.com/.*'
]
linkcheck_anchors_ignore_for_url.extend(custom_linkcheck_anchors_ignore_for_url)

# Tags cannot be added directly in custom_conf.py, so add them here
for tag in custom_tags:
    tags.add(tag)

# html_context['get_contribs'] is a function and cannot be
# cached (see https://github.com/sphinx-doc/sphinx/issues/12300)
suppress_warnings = ["config.cache"]

############################################################
### Styling
############################################################

# Find the current builder
builder = 'dirhtml'
if '-b' in sys.argv:
    builder = sys.argv[sys.argv.index('-b')+1]

# Setting templates_path for epub makes the build fail
if builder == 'dirhtml' or builder == 'html':
    templates_path = ['_templates']
    notfound_template = '404.html'

# Theme configuration
html_theme = 'furo'
html_last_updated_fmt = ''
html_permalinks_icon = '¶'

if html_title == '':
    html_theme_options = {
        'sidebar_hide_name': True
    }

############################################################
### Additional files
############################################################

html_static_path = ['_static']

html_css_files = [
    'custom.css',
    'header.css',
    'github_issue_links.css',
    'furo_colors.css',
    'footer.css',
    'css/cookie-banner.css',
]
html_css_files.extend(custom_html_css_files)

html_js_files = ['header-nav.js', 'footer.js', 'js/bundle.js']
if 'github_issues' in html_context and html_context['github_issues'] and not disable_feedback_button:
    html_js_files.append('github_issue_links.js')
html_js_files.extend(custom_html_js_files)

#############################################################
# Display the contributors

def get_contributors_for_file(github_url, github_folder, pagename, page_source_suffix, display_contributors_since=None):
    # Return a list of {'name', 'github_page'} dicts for everyone who committed
    # to the given page's source file, sorted by name, or None if no Git repo
    # can be located.
    # NOTE(review): the "github_folder" parameter is ignored below in favour of
    # html_context['github_folder'] — confirm whether that is intentional.
    filename = f"{pagename}{page_source_suffix}"
    # strip the leading "/" so the path is relative to the repository root
    paths=html_context['github_folder'][1:] + filename

    try:
        repo = Repo(".")
    except InvalidGitRepositoryError:
        # The build may run from inside the docs folder; walk up to the
        # repository root by stripping the github_folder suffix from the cwd.
        cwd = os.getcwd()
        ghfolder = html_context['github_folder'][:-1]
        if ghfolder and cwd.endswith(ghfolder):
            repo = Repo(cwd.rpartition(ghfolder)[0])
        else:
            print("The local Git repository could not be found.")
            return

    # treat empty/whitespace-only strings as "no lower bound"
    since = display_contributors_since if display_contributors_since and display_contributors_since.strip() else None

    commits = repo.iter_commits(paths=paths, since=since)

    # keep only the most recent commit per author
    contributors_dict = {}
    for commit in commits:
        contributor = commit.author.name
        if contributor not in contributors_dict or commit.committed_date > contributors_dict[contributor]['date']:
            contributors_dict[contributor] = {
                'date': commit.committed_date,
                'sha': commit.hexsha
            }
    # The github_page contains the link to the contributor's latest commit.
    contributors_list = [{'name': name, 'github_page': f"{github_url}/commit/{data['sha']}"} for name, data in contributors_dict.items()]
    sorted_contributors_list = sorted(contributors_list, key=lambda x: x['name'])
    return sorted_contributors_list

html_context['get_contribs'] = get_contributors_for_file
#############################################################


# --- docs/custom_conf.py ---
import datetime
from requirements_parser import _get_requirements

# Custom configuration for the Sphinx documentation builder.
# All configuration specific to your project should be done in this file.
#
# The file is included in the common conf.py configuration file.
# You can modify any of the settings below or add any configuration that
# is not covered by the common conf.py file.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
#
# If you're not familiar with Sphinx and don't want to use advanced
# features, it is sufficient to update the settings in the "Project
# information" section.

############################################################
### Project information
############################################################

# Product name
project = 'awspub'
author = 'Canonical Ltd.'

# The title you want to display for the documentation in the sidebar.
# You might want to include a version number here.
# To not display any title, set this option to an empty string.
html_title = project + ' documentation'

# The default value uses CC-BY-SA as the license and the current year
# as the copyright year.
#
# If your documentation needs a different copyright license, use that
# instead of 'CC-BY-SA'. Also, if your documentation is included as
# part of the code repository of your project, it'll inherit the license
# of the code. So you'll need to specify that license here (instead of
# 'CC-BY-SA').
#
# For static works, it is common to provide the year of first publication.
# Another option is to give the first year and the current year
# for documentation that is often changed, e.g. 2022–2023 (note the en-dash).
#
# A way to check a GitHub repo's creation date is to obtain a classic GitHub
# token with 'repo' permissions here: https://github.com/settings/tokens
# Next, use 'curl' and 'jq' to extract the date from the GitHub API's output:
#
# curl -H 'Authorization: token <token>' \
#   -H 'Accept: application/vnd.github.v3.raw' \
#   https://api.github.com/repos/canonical/<repo> | jq '.created_at'

copyright = '%s CC-BY-SA, %s' % (datetime.date.today().year, author)

## Open Graph configuration - defines what is displayed as a link preview
## when linking to the documentation from another website (see https://ogp.me/)
# The URL where the documentation will be hosted (leave empty if you
# don't know yet)
# NOTE: If no ogp_* variable is defined (e.g. if you remove this section) the
# sphinxext.opengraph extension will be disabled.
ogp_site_url = 'https://canonical-awspub.readthedocs-hosted.com/'
# The documentation website name (usually the same as the product name)
ogp_site_name = project
# The URL of an image or logo that is used in the preview
ogp_image = 'https://assets.ubuntu.com/v1/253da317-image-document-ubuntudocs.svg'

# Update with the local path to the favicon for your product
# (default is the circle of friends)
html_favicon = '_static/favicon.png'

# (Some settings must be part of the html_context dictionary, while others
# are on root level. Don't move the settings.)
html_context = {

    # Change to the link to the website of your product (without "https://")
    # For example: "ubuntu.com/lxd" or "microcloud.is"
    # If there is no product website, edit the header template to remove the
    # link (see the readme for instructions).
    'product_page': 'github.com/canonical/awspub',

    # Add your product tag (the orange part of your logo, will be used in the
    # header) to ".sphinx/_static" and change the path here (start with "_static")
    # (default is the circle of friends)
    'product_tag': '_static/tag.png',

    # Change to the discourse instance you want to be able to link to
    # using the :discourse: metadata at the top of a file
    # (use an empty value if you don't want to link)
    'discourse': '',

    # Change to the Mattermost channel you want to link to
    # (use an empty value if you don't want to link)
    'mattermost': '',

    # Change to the Matrix channel you want to link to
    # (use an empty value if you don't want to link)
    'matrix': '',

    # Change to the GitHub URL for your project
    # This is used, for example, to link to the source files and allow creating GitHub issues directly from the documentation.
    'github_url': 'https://github.com/canonical/awspub',

    # Change to the branch for this version of the documentation
    'github_version': 'main',

    # Change to the folder that contains the documentation
    # (usually "/" or "/docs/")
    'github_folder': '/docs/',

    # Change to an empty value if your GitHub repo doesn't have issues enabled.
    # This will disable the feedback button and the issue link in the footer.
    'github_issues': 'enabled',

    # Change to the folder that contains the documentation
    # (usually "/" or "/docs/")
    "conf_py_path": '/docs/',

    # Controls the existence of Previous / Next buttons at the bottom of pages
    # Valid options: none, prev, next, both
    'sequential_nav': "none",

    # Controls if to display the contributors of a file or not
    "display_contributors": True,

    # Controls time frame for showing the contributors
    "display_contributors_since": ""
}

# If your project is on documentation.ubuntu.com, specify the project
# slug (for example, "lxd") here.
slug = ""

############################################################
### Redirects
############################################################

# Set up redirects (https://documatt.gitlab.io/sphinx-reredirects/usage.html)
# For example: 'explanation/old-name.html': '../how-to/prettify.html',
# You can also configure redirects in the Read the Docs project dashboard
# (see https://docs.readthedocs.io/en/stable/guides/redirects.html).
# NOTE: If this variable is not defined, set to None, or the dictionary is empty,
# the sphinx_reredirects extension will be disabled.
redirects = {}

############################################################
### Link checker exceptions
############################################################

# Links to ignore when checking links
linkcheck_ignore = [
    'http://127.0.0.1:8000'
]

# Pages on which to ignore anchors
# (This list will be appended to linkcheck_anchors_ignore_for_url)
custom_linkcheck_anchors_ignore_for_url = []

############################################################
### Additions to default configuration
############################################################

## The following settings are appended to the default configuration.
## Use them to extend the default functionality.

# Remove this variable to disable the MyST parser extensions.
custom_myst_extensions = []

# Add custom Sphinx extensions as needed.
# This array contains recommended extensions that should be used.
# NOTE: The following extensions are handled automatically and do
# not need to be added here: myst_parser, sphinx_copybutton, sphinx_design,
# sphinx_reredirects, sphinxcontrib.jquery, sphinxext.opengraph
custom_extensions = [
    'sphinx_tabs.tabs',
    'canonical.youtube-links',
    'canonical.related-links',
    'canonical.custom-rst-roles',
    'canonical.terminal-output',
    'notfound.extension',
    'sphinx.ext.autodoc',
    'sphinxcontrib.autodoc_pydantic',
]

# Add custom required Python modules that must be added to the
# .sphinx/requirements.txt file.
# NOTE: The following modules are handled automatically and do not need to be
# added here: canonical-sphinx-extensions, furo, linkify-it-py, myst-parser,
# pyspelling, sphinx, sphinx-autobuild, sphinx-copybutton, sphinx-design,
# sphinx-notfound-page, sphinx-reredirects, sphinx-tabs, sphinxcontrib-jquery,
# sphinxext-opengraph
custom_required_modules = [
    "autodoc_pydantic",
    "pydantic-settings",
]
custom_required_modules.extend(_get_requirements())

# Add files or directories that should be excluded from processing.
custom_excludes = [
    'doc-cheat-sheet*',
    '.github',
    'readme.rst',
    'release-management.rst',
    'SECURITY.md',
]

# Add CSS files (located in .sphinx/_static/)
custom_html_css_files = []

# Add JavaScript files (located in .sphinx/_static/)
custom_html_js_files = []

## The following settings override the default configuration.

# Specify a reST string that is included at the end of each file.
# If commented out, use the default (which pulls the reuse/links.txt
# file into each reST file).
# custom_rst_epilog = ''

# By default, the documentation includes a feedback button at the top.
# You can disable it by setting the following configuration to True.
disable_feedback_button = False

# Add tags that you want to use for conditional inclusion of text
# (https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#tags)
custom_tags = []

# If you are using the :manpage: role, set this variable to the URL for the version
# that you want to link to:
# manpages_url = "https://manpages.ubuntu.com/manpages/noble/en/man{section}/{page}.{section}.html"

############################################################
### Additional configuration
############################################################

## Add any configuration that is not covered by the common conf.py file.

# Define a :center: role that can be used to center the content of table cells.
rst_prolog = '''
.. role:: center
   :class: align-center
'''


# --- awspub/snapshot.py ---
import logging
from typing import Dict, List, Optional

import boto3
from mypy_boto3_ec2.client import EC2Client

from awspub import exceptions
from awspub.context import Context

logger = logging.getLogger(__name__)


class Snapshot:
    """
    Handle EC2 Snapshot API interaction
    """

    def __init__(self, context: Context):
        # shared publishing context; provides conf, tags and source_sha256
        self._ctx: Context = context

    def _get(self, ec2client: EC2Client, snapshot_name: str) -> Optional[str]:
        """
        Get the snapshot id for the given name or None

        :param ec2client: EC2 client for a specific region
        :type ec2client: EC2Client
        :param snapshot_name: the Snapshot name
        :type snapshot_name: str
        :return: Either None or a snapshot-id
        :rtype: Optional[str]
        """
        # only snapshots owned by this account, matching the Name tag and in a
        # usable ("pending" or "completed") state are considered
        resp = ec2client.describe_snapshots(
            Filters=[
                {
                    "Name": "tag:Name",
                    "Values": [
                        snapshot_name,
                    ],
                },
                {
                    "Name": "status",
                    "Values": [
                        "pending",
                        "completed",
                    ],
                },
            ],
            OwnerIds=["self"],
        )
        # exactly one match -> return it; none -> None; more than one is an
        # error because snapshot names are expected to be unique per region
        if len(resp.get("Snapshots", [])) == 1:
            return resp["Snapshots"][0]["SnapshotId"]
        elif len(resp.get("Snapshots", [])) == 0:
            return None
        else:
            raise exceptions.MultipleSnapshotsException(
                f"Found {len(resp.get('Snapshots', []))} snapshots with "
                f"name '{snapshot_name}' in region {ec2client.meta.region_name}"
            )

    def _get_import_snapshot_task(self, ec2client: EC2Client, snapshot_name: str) -> Optional[str]:
        """
        Get a import snapshot task for the given name

        :param ec2client: EC2 client for a specific region
        :type ec2client: EC2Client
        :param snapshot_name: the Snapshot name
        :type snapshot_name: str
        :return: Either None or a import-snapshot-task-id
        :rtype: Optional[str]
        """
        resp = ec2client.describe_import_snapshot_tasks(
            Filters=[
                {
                    "Name": "tag:Name",
                    "Values": [
                        snapshot_name,
                    ],
                }
            ]
        )
        # API doesn't support filters by status so filter here
        tasks: List = resp.get("ImportSnapshotTasks", [])
        # we already know here that the snapshot does not exist (checked in create() before calling this
        # function). so ignore "deleted" or "completed" tasks here
        # it might happen (for whatever reason) that a task got completed but the snapshot got deleted
        # afterwards.
        # In that case a "completed" task for the given snapshot_name exists but
        # that doesn't help so ignore it
        tasks = [t for t in tasks if t["SnapshotTaskDetail"]["Status"] not in ["deleted", "completed"]]
        if len(tasks) == 1:
            return tasks[0]["ImportTaskId"]
        elif len(tasks) == 0:
            return None
        else:
            raise exceptions.MultipleImportSnapshotTasksException(
                f"Found {len(tasks)} import snapshot tasks with "
                f"name '{snapshot_name}' in region {ec2client.meta.region_name}"
            )

    def create(self, ec2client: EC2Client, snapshot_name: str) -> str:
        """
        Create a EC2 snapshot with the given name
        If the snapshot already exists, just return the snapshot-id for the existing snapshot.

        :param ec2client: EC2 client for a specific region
        :type ec2client: EC2Client
        :param snapshot_name: the Snapshot name
        :type snapshot_name: str
        :return: a snapshot-id
        :rtype: str
        """
        # does a snapshot with the given name already exists?
        snap_id: Optional[str] = self._get(ec2client, snapshot_name)
        if snap_id:
            logger.info(f"snapshot with name '{snapshot_name}' already exists in region {ec2client.meta.region_name}")
            return snap_id

        logger.info(
            f"Create snapshot from bucket '{self._ctx.conf['s3']['bucket_name']}' "
            f"for '{snapshot_name}' in region {ec2client.meta.region_name}"
        )

        # extend tags
        # NOTE(review): this appends to self._ctx.tags in place — if Context.tags
        # returns a shared list (rather than a fresh copy), repeated calls would
        # accumulate Name tags; confirm against Context.tags.
        tags = self._ctx.tags
        tags.append({"Key": "Name", "Value": snapshot_name})

        # does a import snapshot task with the given name already exist?
        import_snapshot_task_id: Optional[str] = self._get_import_snapshot_task(ec2client, snapshot_name)
        if import_snapshot_task_id:
            # reuse the in-flight import task instead of starting a second one
            logger.info(
                f"import snapshot task ({import_snapshot_task_id}) with "
                f"name '{snapshot_name}' exists in region {ec2client.meta.region_name}"
            )
        else:
            # the uploaded source file is keyed in S3 by its sha256 checksum
            resp = ec2client.import_snapshot(
                Description="Import ",
                DiskContainer={
                    "Description": "",
                    "Format": "vmdk",
                    "UserBucket": {
                        "S3Bucket": self._ctx.conf["s3"]["bucket_name"],
                        "S3Key": self._ctx.source_sha256,
                    },
                },
                TagSpecifications=[
                    {"ResourceType": "import-snapshot-task", "Tags": tags},
                ],
            )
            import_snapshot_task_id = resp["ImportTaskId"]

        logger.info(
            f"Waiting for snapshot import task (id: {import_snapshot_task_id}) "
            f"in region {ec2client.meta.region_name} ..."
        )

        # up to 90 * 30s = 45 minutes for the import itself
        waiter_import = ec2client.get_waiter("snapshot_imported")
        waiter_import.wait(ImportTaskIds=[import_snapshot_task_id], WaiterConfig={"Delay": 30, "MaxAttempts": 90})

        task_details = ec2client.describe_import_snapshot_tasks(ImportTaskIds=[import_snapshot_task_id])
        snapshot_id = task_details["ImportSnapshotTasks"][0]["SnapshotTaskDetail"]["SnapshotId"]

        # create tags before waiting for completion so the tags are already there
        ec2client.create_tags(Resources=[snapshot_id], Tags=tags)

        # up to 60 * 30s = 30 minutes for the snapshot to reach "completed"
        waiter_completed = ec2client.get_waiter("snapshot_completed")
        waiter_completed.wait(SnapshotIds=[snapshot_id], WaiterConfig={"Delay": 30, "MaxAttempts": 60})

        logger.info(f"Snapshot import as '{snapshot_id}' in region {ec2client.meta.region_name} done")
        return snapshot_id

    def _copy(self, snapshot_name: str, source_region: str, destination_region: str) -> str:
        """
        Copy a EC2 snapshot for the given context to the destination region
        NOTE: we don't wait for the snapshot to complete
        here!

        :param snapshot_name: the Snapshot name to copy
        :type snapshot_name: str
        :param source_region: a region to copy the snapshot from
        :type source_region: str
        :param destination_region: a region to copy the snapshot to
        :type destination_region: str

        :return: the existing or created snapshot-id
        :rtype: str
        """

        # does the snapshot with that name already exist in the destination region?
        ec2client_dest: EC2Client = boto3.client("ec2", region_name=destination_region)
        snapshot_id: Optional[str] = self._get(ec2client_dest, snapshot_name)
        if snapshot_id:
            logger.info(
                f"snapshot with name '{snapshot_name}' already "
                f"exists ({snapshot_id}) in destination region {ec2client_dest.meta.region_name}"
            )
            return snapshot_id

        ec2client_source: EC2Client = boto3.client("ec2", region_name=source_region)
        source_snapshot_id: Optional[str] = self._get(ec2client_source, snapshot_name)
        if not source_snapshot_id:
            raise ValueError(
                f"Can not find source snapshot with name '{snapshot_name}' "
                f"in region {ec2client_source.meta.region_name}"
            )

        logger.info(f"Copy snapshot {source_snapshot_id} from " f"{source_region} to {destination_region}")
        # extend tags
        # NOTE(review): appends to self._ctx.tags in place — same concern as in
        # create(); confirm Context.tags returns a fresh list per access.
        tags = self._ctx.tags
        tags.append({"Key": "Name", "Value": snapshot_name})
        resp = ec2client_dest.copy_snapshot(
            SourceRegion=source_region,
            SourceSnapshotId=source_snapshot_id,
            TagSpecifications=[{"ResourceType": "snapshot", "Tags": tags}],
        )

        # note: we don't wait for the snapshot to complete here!
        return resp["SnapshotId"]

    def copy(self, snapshot_name: str, source_region: str, destination_regions: List[str]) -> Dict[str, str]:
        """
        Copy a snapshot to multiple regions

        :param snapshot_name: the Snapshot name to copy
        :type snapshot_name: str
        :param source_region: a region to copy the snapshot from
        :type source_region: str
        :param destination_regions: a list of regions to copy the snapshot to
        :type destination_regions: List[str]
        :return: a dict with region/snapshot-id mapping for the newly copied snapshots
        :rtype: Dict[str, str] where the key is a region name and the value a snapshot-id
        """
        # start all copies first (no waiting in _copy()) ...
        snapshot_ids: Dict[str, str] = dict()
        for destination_region in destination_regions:
            snapshot_ids[destination_region] = self._copy(snapshot_name, source_region, destination_region)

        # ... then wait for all of them so the copies run in parallel
        logger.info(f"Waiting for {len(snapshot_ids)} snapshots to appear in the destination regions ...")
        for destination_region, snapshot_id in snapshot_ids.items():
            ec2client_dest = boto3.client("ec2", region_name=destination_region)
            waiter = ec2client_dest.get_waiter("snapshot_completed")
            logger.info(f"Waiting for {snapshot_id} in {ec2client_dest.meta.region_name} to complete ...")
            waiter.wait(SnapshotIds=[snapshot_id], WaiterConfig={"Delay": 30, "MaxAttempts": 90})

        return snapshot_ids


# --- awspub/configmodels.py ---
import pathlib
import re
from enum import Enum
from typing import Dict, List, Literal, Optional

from pydantic import BaseModel, ConfigDict, Field, field_validator

from awspub.common import _split_partition


class ConfigS3Model(BaseModel):
    """
    S3 configuration.
class ConfigSourceModel(BaseModel):
    """
    Source configuration.
    This defines the source (usually .vmdk) that is uploaded
    to S3 and then used to create EC2 snapshots in different regions.
    """

    # reject unknown keys so config-file typos fail validation loudly
    model_config = ConfigDict(extra="forbid")

    # local path to the disk image; existence is not checked at validation time
    path: pathlib.Path = Field(description="Path to a local .vmdk image")
    architecture: Literal["x86_64", "arm64"] = Field(description="The architecture of the given .vmdk image")
class ConfigImageMarketplaceModel(BaseModel):
    """
    Image/AMI Marketplace specific configuration to request new Marketplace versions
    See https://docs.aws.amazon.com/marketplace-catalog/latest/api-reference/ami-products.html
    for further information
    """

    model_config = ConfigDict(extra="forbid")

    entity_id: str = Field(description="The entity ID (product ID)")
    # see https://docs.aws.amazon.com/marketplace/latest/userguide/ami-single-ami-products.html#single-ami-marketplace-ami-access # noqa:E501
    access_role_arn: str = Field(
        description="IAM role Amazon Resource Name (ARN) used by AWS Marketplace to access the provided AMI"
    )
    version_title: str = Field(description="The version title. Must be unique across the product")
    release_notes: str = Field(description="The release notes")
    user_name: str = Field(description="The login username to access the operating system")
    scanning_port: int = Field(description="Port to access the operating system (default: 22)", default=22)
    os_name: str = Field(description="Operating system name displayed to Marketplace buyers")
    os_version: str = Field(description="Operating system version displayed to Marketplace buyers")
    # fixed: description previously started with a stray leading space
    usage_instructions: str = Field(
        description="Instructions for using the AMI, or a link to more information about the AMI"
    )
    recommended_instance_type: str = Field(
        description="The instance type that is recommended to run the service with the AMI and is the "
        "default for 1-click installs of your service"
    )
    # BUGFIX: this field had no default, which in pydantic v2 makes an
    # Optional[...] field *required* (it must be given explicitly, even as
    # null). Give it an explicit None default so configs can omit it, matching
    # every other optional field in this module. Backward compatible: configs
    # that do set it keep working unchanged.
    security_groups: Optional[List[ConfigImageMarketplaceSecurityGroupModel]] = Field(
        description="Optional list of security group rules recommended for the AMI",
        default=None,
    )
ConfigDict(extra="forbid") 85 | 86 | name: str = Field( 87 | description="The fully qualified name of the parameter that you want to add to the system. " 88 | "A parameter name must be unique within an Amazon Web Services Region" 89 | ) 90 | description: Optional[str] = Field( 91 | description="Information about the parameter that you want to add to the system", default=None 92 | ) 93 | allow_overwrite: Optional[bool] = Field( 94 | description="allow to overwrite an existing parameter. Useful for keep a 'latest' parameter (default: false)", 95 | default=False, 96 | ) 97 | 98 | 99 | class SNSNotificationProtocol(str, Enum): 100 | DEFAULT = "default" 101 | EMAIL = "email" 102 | 103 | 104 | class ConfigImageSNSNotificationModel(BaseModel): 105 | """ 106 | Image/AMI SNS Notification specific configuration to notify subscribers about new images availability 107 | """ 108 | 109 | model_config = ConfigDict(extra="forbid") 110 | 111 | subject: str = Field(description="The subject of SNS Notification", min_length=1, max_length=99) 112 | message: Dict[SNSNotificationProtocol, str] = Field( 113 | description="The body of the message to be sent to subscribers.", 114 | default={SNSNotificationProtocol.DEFAULT: ""}, 115 | ) 116 | regions: Optional[List[str]] = Field( 117 | description="Optional list of regions for sending notification. If not given, regions where the image " 118 | "registered will be used from the currently used parition. 
class ConfigImageModel(BaseModel):
    """
    Image/AMI configuration.
    """

    model_config = ConfigDict(extra="forbid")

    description: Optional[str] = Field(description="Optional image description", default=None)
    # BUGFIX: the implicitly concatenated description strings previously
    # rendered as "... all available regions willbe used ..." (missing space).
    regions: Optional[List[str]] = Field(
        description="Optional list of regions for this image. If not given, all available regions will "
        "be used from the currently used partition. If a region doesn't exist in the currently used partition,"
        " it will be ignored.",
        default=None,
    )
    separate_snapshot: bool = Field(description="Use a separate snapshot for this image?", default=False)
    billing_products: Optional[List[str]] = Field(description="Optional list of billing codes", default=None)
    boot_mode: Literal["legacy-bios", "uefi", "uefi-preferred"] = Field(
        description="The boot mode. For arm64, this needs to be 'uefi'"
    )
    root_device_name: Optional[str] = Field(description="The root device name", default="/dev/sda1")
    root_device_volume_type: Optional[Literal["gp2", "gp3"]] = Field(
        description="The root device volume type", default="gp3"
    )
    root_device_volume_size: Optional[int] = Field(description="The root device volume size (in GB)", default=8)
    uefi_data: Optional[pathlib.Path] = Field(
        description="Optional path to a non-volatile UEFI variable store (must be already base64 encoded)", default=None
    )
    tpm_support: Optional[Literal["v2.0"]] = Field(
        description="Optional TPM support. If this is set, 'boot_mode' must be 'uefi'", default=None
    )
    imds_support: Optional[Literal["v2.0"]] = Field(description="Optional IMDS support", default=None)
    share: Optional[List[str]] = Field(
        description="Optional list of account IDs, organization ARN, OU ARN the image and snapshot will be shared with."
        " The account ID can be prefixed with the partition and separated by ':'. Eg 'aws-cn:123456789123'",
        default=None,
    )
    # grammar fix in the description text: "a image" -> "an image"
    temporary: Optional[bool] = Field(
        description="Optional boolean field indicates that an image is only temporary", default=False
    )
    public: Optional[bool] = Field(
        description="Optional boolean field indicates if the image should be public", default=False
    )
    marketplace: Optional[ConfigImageMarketplaceModel] = Field(
        description="Optional structure containing Marketplace related configuration for the commercial "
        "'aws' partition",
        default=None,
    )
    ssm_parameter: Optional[List[ConfigImageSSMParameterModel]] = Field(
        description="Optional list of SSM parameter paths of type `aws:ec2:image` which will "
        "be pushed to the parameter store",
        default=None,
    )
    groups: Optional[List[str]] = Field(description="Optional list of groups this image is part of", default=[])
    tags: Optional[Dict[str, str]] = Field(description="Optional Tags to apply to this image only", default={})
    sns: Optional[List[Dict[str, ConfigImageSNSNotificationModel]]] = Field(
        description="Optional list of SNS Notification related configuration", default=None
    )

    @field_validator("share")
    @classmethod
    def check_share(cls, v: Optional[List[str]]) -> Optional[List[str]]:
        """
        Make sure the account IDs are valid and if given the partition is correct

        :raises ValueError: if an entry is neither a 12 digit account ID nor an
            Organization/OU ARN, or if the partition prefix is unknown
        """
        patterns = [
            # https://docs.aws.amazon.com/organizations/latest/APIReference/API_Account.html
            r"\d{12}",
            # Adjusted for partitions
            # https://docs.aws.amazon.com/organizations/latest/APIReference/API_Organization.html
            r"arn:aws(?:-cn)?(?:-us-gov)?:organizations::\d{12}:organization\/o-[a-z0-9]{10,32}",
            # https://docs.aws.amazon.com/organizations/latest/APIReference/API_OrganizationalUnit.html
            r"arn:aws(?:-cn)?(?:-us-gov)?:organizations::\d{12}:ou\/o-[a-z0-9]{10,32}\/ou-[0-9a-z]{4,32}-[0-9a-z]{8,32}",  # noqa:E501
        ]
        if v is not None:
            for val in v:
                partition, account_id_or_arn = _split_partition(val)
                # idiom: any() replaces the previous manual valid-flag loop
                if not any(re.fullmatch(pattern, account_id_or_arn) for pattern in patterns):
                    raise ValueError("Account ID must be 12 digits long or an ARN for Organization or OU")
                if partition not in ["aws", "aws-cn", "aws-us-gov"]:
                    raise ValueError("Partition must be one of 'aws', 'aws-cn', 'aws-us-gov'")
        return v


class ConfigModel(BaseModel):
    """
    The base model for the whole configuration
    """

    model_config = ConfigDict(extra="forbid")

    s3: ConfigS3Model
    source: ConfigSourceModel
    images: Dict[str, ConfigImageModel]
    tags: Optional[Dict[str, str]] = Field(description="Optional Tags to apply to all resources", default={})
code-block:: shell 15 | 16 | awspub create config.yaml 17 | [snipped output] 18 | { 19 | "images": { 20 | "my-custom-image": { 21 | "ap-south-2": "ami-015fa46e6ec690c8e", 22 | "ap-south-1": "ami-0fd9238a64ea231d0", 23 | "eu-south-1": "ami-0cbb4771743cc81fe", 24 | "eu-south-2": "ami-0067ee557befd09c2", 25 | "me-central-1": "ami-023fa019e0ce98e91", 26 | "il-central-1": "ami-092d3f2a7677b8cf1", 27 | "ca-central-1": "ami-0d2e897cd1ebecc45", 28 | "eu-central-1": "ami-0b9ed498e040c69e2", 29 | "eu-central-2": "ami-0fb0f61690e55ab8e", 30 | "us-west-1": "ami-069c013403cc15c2f", 31 | "us-west-2": "ami-06f9d32912a83571b", 32 | "af-south-1": "ami-0371f67e8905c045a", 33 | "eu-north-1": "ami-00710b821b31f5c78", 34 | "eu-west-3": "ami-08b74828e79d0a405", 35 | "eu-west-2": "ami-0f6f9c073bdb7b731", 36 | "eu-west-1": "ami-0a07629b25777bf07", 37 | "ap-northeast-3": "ami-07d680c934126a92b", 38 | "ap-northeast-2": "ami-01fa9f4862d957b59", 39 | "me-south-1": "ami-0827faef233b14a29", 40 | "ap-northeast-1": "ami-0d119806827c3af22", 41 | "sa-east-1": "ami-07f8dfef0a8855f06", 42 | "ap-east-1": "ami-047cb2feb00bfc834", 43 | "ca-west-1": "ami-061003b943c2d6be8", 44 | "ap-southeast-1": "ami-0a2ca6ffb79999bb5", 45 | "ap-southeast-2": "ami-0a74f3afdd309dbf2", 46 | "ap-southeast-3": "ami-091f9d0adaa612bfb", 47 | "ap-southeast-4": "ami-0ccc7ff1fcaf16948", 48 | "us-east-1": "ami-0c470d0e3eaf16e67", 49 | "us-east-2": "ami-02a7417ff5d866f4b" 50 | } 51 | }, 52 | "images-by-group": {} 53 | } 54 | 55 | The output shows the published image IDs for each region. Since those images are not 56 | public, they can only be used from within the same account. 57 | 58 | .. note:: 59 | The command can be run again without publishing anything new as long as the source path file 60 | and the config itself doesn't change. 61 | 62 | 63 | Multiple images 64 | ~~~~~~~~~~~~~~~ 65 | 66 | It's possible to publish multiple images based on the same VMDK file. The configuration looks like: 67 | 68 | .. 
literalinclude:: ../config-samples/config-multiple-images.yaml 69 | :language: yaml 70 | 71 | Running `awspub` using this config file will publish two images in two different regions. 72 | 73 | .. code-block:: shell 74 | 75 | awspub --log-file awspub.log create config.yaml 76 | { 77 | "images": { 78 | "my-custom-image": { 79 | "eu-central-1": "ami-0b9ed498e040c69e2" 80 | }, 81 | "my-custom-image-2": { 82 | "eu-central-2": "ami-03889118047373658" 83 | } 84 | }, 85 | "images-by-group": {} 86 | } 87 | 88 | Parameter substitution 89 | ~~~~~~~~~~~~~~~~~~~~~~ 90 | 91 | There are cases where parts of the configuration file need to be dynamic. To support that 92 | `awspub` provides basic template substitution (based on Python's `string.Template class `_) . 93 | 94 | .. literalinclude:: ../config-samples/config-with-parameters.yaml 95 | :language: yaml 96 | 97 | In the config file shown above, the identifier `$serial` which will be replaced with a value that 98 | is defined in another YAML file. This YAML file (say ``config-mapping.yaml``) contains a mapping 99 | structure (dict in python) that maps the identifiers. 100 | 101 | .. literalinclude:: ../config-samples/config-with-parameters.yaml.mapping 102 | :language: yaml 103 | 104 | Using both of these config files, the command for `awspub` becomes: 105 | 106 | .. code-block:: shell 107 | 108 | awspub --log-file awspub.log create config.yaml --config-mapping config-mapping.yaml 109 | { 110 | "images": { 111 | "my-custom-image-20171022": { 112 | "eu-central-1": "ami-0df443d5919e31d1b" 113 | } 114 | }, 115 | "images-by-group": {} 116 | } 117 | 118 | 119 | Image groups 120 | ~~~~~~~~~~~~ 121 | 122 | There might be cases were the different commands (e.g. `awspub create` or `awspub publish`) 123 | should only be applied on a subset of the defined images. That's possible with the `groups` 124 | config option: 125 | 126 | .. 
literalinclude:: ../config-samples/config-minimal-groups.yaml 127 | :language: yaml 128 | 129 | Use the ``--group`` parameter to filter the images that the `awspub` command should operate on: 130 | 131 | .. code-block:: shell 132 | 133 | awspub --log-file awspub.log create config.yaml --group group1 134 | { 135 | "images": { 136 | "my-custom-image-1": { 137 | "us-west-1": "ami-09461116d07dd6604" 138 | } 139 | }, 140 | "images-by-group": { 141 | "group1": { 142 | "my-custom-image-1": { 143 | "us-west-1": "ami-09461116d07dd6604" 144 | } 145 | } 146 | } 147 | } 148 | 149 | awspub --log-file awspub.log create config.yaml --group group2 150 | { 151 | "images": { 152 | "my-custom-image-2": { 153 | "us-east-1": "ami-018539227554e51fe", 154 | "ca-central-1": "ami-071d3602417c28201" 155 | } 156 | }, 157 | "images-by-group": { 158 | "group2": { 159 | "my-custom-image-2": { 160 | "us-east-1": "ami-018539227554e51fe", 161 | "ca-central-1": "ami-071d3602417c28201" 162 | } 163 | } 164 | } 165 | } 166 | 167 | The first command is applied only to images defined in `group1`, while the second one is applied 168 | only to images defined within `group2`. 169 | 170 | .. note:: 171 | If no `--group` parameter is given, the different commands operate on **all** defined images. 172 | 173 | 174 | Publish images 175 | ~~~~~~~~~~~~~~ 176 | 177 | To make images public, the configuration needs to have the `public` flag set for 178 | each image that needs to be public. 179 | 180 | .. literalinclude:: ../config-samples/config-minimal-public.yaml 181 | :language: yaml 182 | 183 | The image needs to be created and then published: 184 | 185 | .. code-block:: shell 186 | 187 | awspub create config.yaml 188 | awspub publish config.yaml 189 | 190 | Sharing images 191 | ~~~~~~~~~~~~~~ 192 | 193 | Images can be shared with other AWS accounts. For that, the account IDs of the other accounts are needed. 194 | 195 | .. 
literalinclude:: ../config-samples/config-minimal-share.yaml 196 | :language: yaml 197 | 198 | In the above example, the image `my-custom-image` will be shared with the account `1234567890123` 199 | when `awspub` runs in the commercial partition (``aws``, the default). It'll be shared 200 | with the account `456789012345` when `awspub` runs in the the china partition (``aws-cn``). 201 | 202 | Also sharing with an organization and organisation units is available by organization or organisational 203 | unit ARN (which already encodes partition). 204 | 205 | AWS Marketplace 206 | ~~~~~~~~~~~~~~~ 207 | 208 | It's possible to publish to `AWS Marketplace `_ if an entity of a `Single-AMI product `_ already exists, an access role ARN is available and an AMI exists in the `us-east-1` region. 209 | 210 | .. literalinclude:: ../config-samples/config-minimal-marketplace.yaml 211 | :language: yaml 212 | 213 | The image needs to be created first and the `publish` command will request a new Marketplace version 214 | for the given entity: 215 | 216 | .. code-block:: shell 217 | 218 | awspub create config.yaml 219 | awspub publish config.yaml 220 | 221 | SSM Parameter Store 222 | ~~~~~~~~~~~~~~~~~~~ 223 | 224 | It's possible to push information about published images to the `SSM Parameter Store `_. That's 225 | useful e.g. to have a common way to get the latest image ID on different 226 | regions. To push image information to the parameter store, the ``ssm_parameter`` 227 | configuration for each image must be filled: 228 | 229 | .. literalinclude:: ../config-samples/config-minimal-ssm.yaml 230 | :language: yaml 231 | 232 | along with a corresponding mapping file: 233 | 234 | .. literalinclude:: ../config-samples/config-minimal-ssm.yaml.mapping 235 | :language: yaml 236 | 237 | Create the image and use the `publish` command to publish the image and also push information to the parameter store: 238 | 239 | .. 
code-block:: shell 240 | 241 | awspub create config.yaml --config-mapping config.yaml.mapping 242 | awspub publish config.yaml --config-mapping config.yaml.mapping 243 | 244 | SNS Notification 245 | ~~~~~~~~~~~~~~~~ 246 | 247 | It's possible to publish messages through the `Simple Notification Service (SNS) `_. 248 | Delivery to multiple topics is possible, but the topics need to exist in each of the regions where the notification will be sent. 249 | 250 | To notify image information to users, the ``sns`` configuration for each image must be filled: 251 | 252 | .. literalinclude:: ../config-samples/config-minimal-sns.yaml 253 | :language: yaml 254 | 255 | along with a corresponding mapping file: 256 | 257 | .. literalinclude:: ../config-samples/config-minimal-sns.yaml.mapping 258 | :language: yaml 259 | 260 | Currently, the supported protocols are ``default`` and ``email`` only, and the ``default`` key is required to 261 | send notifications. 262 | The ``default`` message will be used as a fallback message for any protocols. 263 | 264 | Also, Regions can also be specified in ``sns`` configuration to indicate where the notification should be sent. If no regions are specified, SNS will default to using all regions in the partition. 265 | 266 | Create the image and use the `publish` command to publish the image and also notify the published images to users: 267 | 268 | .. code-block:: shell 269 | 270 | awspub create config.yaml --config-mapping config.yaml.mapping 271 | awspub publish config.yaml --config-mapping config.yaml.mapping 272 | 273 | Resource tags 274 | ~~~~~~~~~~~~~ 275 | 276 | The different AWS resources (S3 objects, snapshots and AMIs) can have tags associated with them. 277 | `awspub` defines some base tags which are prefixed with `awspub:`. 278 | In addition to those tags, there's a `tags` config where tags 279 | for all resources can be defined: 280 | 281 | .. 
literalinclude:: ../config-samples/config-minimal-tags.yaml 282 | :language: yaml 283 | 284 | This config will add the tag(s) defined to all resources. 285 | It's also possible to define image specific tags: 286 | 287 | .. literalinclude:: ../config-samples/config-minimal-image-tags.yaml 288 | :language: yaml 289 | 290 | "my-custom-image-1" would have the common tag "tag-key" plus the image specific 291 | tag "key1". 292 | "my-custom-image-2" would have the common tag "tag-key" but the value would be 293 | overwritten with "another-value" because image specific tags override the common tags. 294 | -------------------------------------------------------------------------------- /_static/custom.css: -------------------------------------------------------------------------------- 1 | /** 2 | Ubuntu variable font definitions. 3 | Based on https://github.com/canonical/vanilla-framework/blob/main/scss/_base_fontfaces.scss 4 | 5 | When font files are updated in Vanilla, the links to font files will need to be updated here as well. 
6 | */ 7 | 8 | /* default font set */ 9 | @font-face { 10 | font-family: 'Ubuntu variable'; 11 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 12 | font-style: normal; 13 | font-weight: 100 800; /* min and max value for the weight axis */ 14 | src: url('https://assets.ubuntu.com/v1/f1ea362b-Ubuntu%5Bwdth,wght%5D-latin-v0.896a.woff2') format('woff2-variations'); 15 | } 16 | 17 | @font-face { 18 | font-family: 'Ubuntu variable'; 19 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 20 | font-style: italic; 21 | font-weight: 100 800; /* min and max value for the weight axis */ 22 | src: url('https://assets.ubuntu.com/v1/90b59210-Ubuntu-Italic%5Bwdth,wght%5D-latin-v0.896a.woff2') format('woff2-variations'); 23 | } 24 | 25 | @font-face { 26 | font-family: 'Ubuntu Mono variable'; 27 | font-style: normal; 28 | font-weight: 100 800; /* min and max value for the weight axis */ 29 | src: url('https://assets.ubuntu.com/v1/d5fc1819-UbuntuMono%5Bwght%5D-latin-v0.869.woff2') format('woff2-variations'); 30 | } 31 | 32 | /* cyrillic-ext */ 33 | @font-face { 34 | font-family: 'Ubuntu variable'; 35 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 36 | font-style: normal; 37 | font-weight: 100 800; /* min and max value for the weight axis */ 38 | src: url('https://assets.ubuntu.com/v1/77cd6650-Ubuntu%5Bwdth,wght%5D-cyrillic-extended-v0.896a.woff2') format('woff2-variations'); 39 | unicode-range: U+0460-052F, U+20B4, U+2DE0-2DFF, U+A640-A69F; 40 | } 41 | 42 | /* cyrillic */ 43 | @font-face { 44 | font-family: 'Ubuntu variable'; 45 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 46 | font-style: normal; 47 | font-weight: 100 800; /* min and max value for the weight axis */ 48 | src: url('https://assets.ubuntu.com/v1/2702fce5-Ubuntu%5Bwdth,wght%5D-cyrillic-v0.896a.woff2') format('woff2-variations'); 49 | unicode-range: 
U+0400-045F, U+0490-0491, U+04B0-04B1, U+2116; 50 | } 51 | 52 | /* greek-ext */ 53 | @font-face { 54 | font-family: 'Ubuntu variable'; 55 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 56 | font-style: normal; 57 | font-weight: 100 800; /* min and max value for the weight axis */ 58 | src: url('https://assets.ubuntu.com/v1/5c108b7d-Ubuntu%5Bwdth,wght%5D-greek-extended-v0.896a.woff2') format('woff2-variations'); 59 | unicode-range: U+1F00-1FFF; 60 | } 61 | 62 | /* greek */ 63 | @font-face { 64 | font-family: 'Ubuntu variable'; 65 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 66 | font-style: normal; 67 | font-weight: 100 800; /* min and max value for the weight axis */ 68 | src: url('https://assets.ubuntu.com/v1/0a14c405-Ubuntu%5Bwdth,wght%5D-greek-v0.896a.woff2') format('woff2-variations'); 69 | unicode-range: U+0370-03FF; 70 | } 71 | 72 | /* latin-ext */ 73 | @font-face { 74 | font-family: 'Ubuntu variable'; 75 | font-stretch: 100%; /* min and max value for the width axis, expressed as percentage */ 76 | font-style: normal; 77 | font-weight: 100 800; /* min and max value for the weight axis */ 78 | src: url('https://assets.ubuntu.com/v1/19f68eeb-Ubuntu%5Bwdth,wght%5D-latin-extended-v0.896a.woff2') format('woff2-variations'); 79 | unicode-range: U+0100-024F, U+1E00-1EFF, U+20A0-20AB, U+20AD-20CF, U+2C60-2C7F, U+A720-A7FF; 80 | } 81 | 82 | 83 | /** Define font-weights as per Vanilla 84 | Based on: https://github.com/canonical/vanilla-framework/blob/main/scss/_base_typography-definitions.scss 85 | 86 | regular text: 400, 87 | bold: 550, 88 | thin: 300, 89 | 90 | h1: bold, 91 | h2: 180; 92 | h3: bold, 93 | h4: 275, 94 | h5: bold, 95 | h6: regular 96 | */ 97 | 98 | /* default regular text */ 99 | html { 100 | font-weight: 400; 101 | } 102 | 103 | /* heading specific definitions */ 104 | h1, h3, h5 { font-weight: 550; } 105 | h2 { font-weight: 180; } 106 | h4 { font-weight: 275; } 107 
| 108 | /* bold */ 109 | .toc-tree li.scroll-current>.reference, 110 | dl.glossary dt, 111 | dl.simple dt, 112 | dl:not([class]) dt { 113 | font-weight: 550; 114 | } 115 | 116 | 117 | /** Table styling **/ 118 | 119 | th.head { 120 | text-transform: uppercase; 121 | font-size: var(--font-size--small); 122 | text-align: initial; 123 | } 124 | 125 | table.align-center th.head { 126 | text-align: center 127 | } 128 | 129 | table.docutils { 130 | border: 0; 131 | box-shadow: none; 132 | width:100%; 133 | } 134 | 135 | table.docutils td, table.docutils th, table.docutils td:last-child, table.docutils th:last-child, table.docutils td:first-child, table.docutils th:first-child { 136 | border-right: none; 137 | border-left: none; 138 | } 139 | 140 | /* Allow to centre text horizontally in table data cells */ 141 | table.align-center { 142 | text-align: center !important; 143 | } 144 | 145 | /** No rounded corners **/ 146 | 147 | .admonition, code.literal, .sphinx-tabs-tab, .sphinx-tabs-panel, .highlight { 148 | border-radius: 0; 149 | } 150 | 151 | /** Admonition styling **/ 152 | 153 | .admonition { 154 | border-top: 1px solid #d9d9d9; 155 | border-right: 1px solid #d9d9d9; 156 | border-bottom: 1px solid #d9d9d9; 157 | } 158 | 159 | /** Color for the "copy link" symbol next to headings **/ 160 | 161 | a.headerlink { 162 | color: var(--color-brand-primary); 163 | } 164 | 165 | /** Line to the left of the current navigation entry **/ 166 | 167 | .sidebar-tree li.current-page { 168 | border-left: 2px solid var(--color-brand-primary); 169 | } 170 | 171 | /** Some tweaks for Sphinx tabs **/ 172 | 173 | [role="tablist"] { 174 | border-bottom: 1px solid var(--color-sidebar-item-background--hover); 175 | } 176 | 177 | .sphinx-tabs-tab[aria-selected="true"], .sd-tab-set>input:checked+label{ 178 | border: 0; 179 | border-bottom: 2px solid var(--color-brand-primary); 180 | font-weight: 400; 181 | font-size: 1rem; 182 | color: var(--color-brand-primary); 183 | } 184 | 185 | 
body[data-theme="dark"] .sphinx-tabs-tab[aria-selected="true"] { 186 | background: var(--color-background-primary); 187 | border-bottom: 2px solid var(--color-brand-primary); 188 | } 189 | 190 | button.sphinx-tabs-tab[aria-selected="false"]:hover, .sd-tab-set>input:not(:checked)+label:hover { 191 | border-bottom: 2px solid var(--color-foreground-border); 192 | } 193 | 194 | button.sphinx-tabs-tab[aria-selected="false"]{ 195 | border-bottom: 2px solid var(--color-background-primary); 196 | } 197 | 198 | body[data-theme="dark"] .sphinx-tabs-tab { 199 | background: var(--color-background-primary); 200 | } 201 | 202 | .sphinx-tabs-tab, .sd-tab-set>label{ 203 | color: var(--color-brand-primary); 204 | font-family: var(--font-stack); 205 | font-weight: 400; 206 | font-size: 1rem; 207 | padding: 1em 1.25em .5em 208 | } 209 | 210 | .sphinx-tabs-panel { 211 | border: 0; 212 | border-bottom: 1px solid var(--color-sidebar-item-background--hover); 213 | background: var(--color-background-primary); 214 | padding: 0.75rem 0 0.75rem 0; 215 | } 216 | 217 | body[data-theme="dark"] .sphinx-tabs-panel { 218 | background: var(--color-background-primary); 219 | } 220 | 221 | /** A tweak for issue #190 **/ 222 | 223 | .highlight .hll { 224 | background-color: var(--color-highlighted-background); 225 | } 226 | 227 | 228 | /** Custom classes to fix scrolling in tables by decreasing the 229 | font size or breaking certain columns. 
230 | Specify the classes in the Markdown file with, for example: 231 | ```{rst-class} break-col-4 min-width-4-8 232 | ``` 233 | **/ 234 | 235 | table.dec-font-size { 236 | font-size: smaller; 237 | } 238 | table.break-col-1 td.text-left:first-child { 239 | word-break: break-word; 240 | } 241 | table.break-col-4 td.text-left:nth-child(4) { 242 | word-break: break-word; 243 | } 244 | table.min-width-1-15 td.text-left:first-child { 245 | min-width: 15em; 246 | } 247 | table.min-width-4-8 td.text-left:nth-child(4) { 248 | min-width: 8em; 249 | } 250 | 251 | /** Underline for abbreviations **/ 252 | 253 | abbr[title] { 254 | text-decoration: underline solid #cdcdcd; 255 | } 256 | 257 | /** Use the same style for right-details as for left-details **/ 258 | .bottom-of-page .right-details { 259 | font-size: var(--font-size--small); 260 | display: block; 261 | } 262 | 263 | /** Version switcher */ 264 | button.version_select { 265 | color: var(--color-foreground-primary); 266 | background-color: var(--color-toc-background); 267 | padding: 5px 10px; 268 | border: none; 269 | } 270 | 271 | .version_select:hover, .version_select:focus { 272 | background-color: var(--color-sidebar-item-background--hover); 273 | } 274 | 275 | .version_dropdown { 276 | position: relative; 277 | display: inline-block; 278 | text-align: right; 279 | font-size: var(--sidebar-item-font-size); 280 | } 281 | 282 | .available_versions { 283 | display: none; 284 | position: absolute; 285 | right: 0px; 286 | background-color: var(--color-toc-background); 287 | box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2); 288 | z-index: 11; 289 | } 290 | 291 | .available_versions a { 292 | color: var(--color-foreground-primary); 293 | padding: 12px 16px; 294 | text-decoration: none; 295 | display: block; 296 | } 297 | 298 | .available_versions a:hover {background-color: var(--color-sidebar-item-background--current)} 299 | 300 | /** Suppress link underlines outside on-hover **/ 301 | a { 302 | text-decoration: none; 303 | 
} 304 | 305 | a:hover, a:visited:hover { 306 | text-decoration: underline; 307 | } 308 | 309 | .show {display:block;} 310 | 311 | /** Fix for nested numbered list - the nested list is lettered **/ 312 | ol.arabic ol.arabic { 313 | list-style: lower-alpha; 314 | } 315 | 316 | /** Make expandable sections look like links **/ 317 | details summary { 318 | color: var(--color-link); 319 | } 320 | 321 | /** Fix the styling of the version box for readthedocs **/ 322 | 323 | #furo-readthedocs-versions .rst-versions, #furo-readthedocs-versions .rst-current-version, #furo-readthedocs-versions:focus-within .rst-current-version, #furo-readthedocs-versions:hover .rst-current-version { 324 | background: var(--color-sidebar-item-background--hover); 325 | } 326 | 327 | .rst-versions .rst-other-versions dd a { 328 | color: var(--color-link); 329 | } 330 | 331 | #furo-readthedocs-versions:focus-within .rst-current-version .fa-book, #furo-readthedocs-versions:hover .rst-current-version .fa-book, .rst-versions .rst-other-versions { 332 | color: var(--color-sidebar-link-text); 333 | } 334 | 335 | .rst-versions .rst-current-version { 336 | color: var(--color-version-popup); 337 | font-weight: bolder; 338 | } 339 | 340 | /* Code-block copybutton invisible by default 341 | (overriding Furo config to achieve default copybutton setting). 
*/ 342 | .highlight button.copybtn { 343 | opacity: 0; 344 | } 345 | 346 | /* Mimicking the 'Give feedback' button for UX consistency */ 347 | .sidebar-search-container input[type=submit] { 348 | color: #FFFFFF; 349 | border: 2px solid #D6410D; 350 | padding: var(--sidebar-search-input-spacing-vertical) var(--sidebar-search-input-spacing-horizontal); 351 | background: #D6410D; 352 | font-weight: bold; 353 | font-size: var(--font-size--small); 354 | cursor: pointer; 355 | } 356 | 357 | .sidebar-search-container input[type=submit]:hover { 358 | text-decoration: underline; 359 | } 360 | 361 | /* Make inline code the same size as code blocks */ 362 | p code.literal { 363 | border: 0; 364 | font-size: var(--code-font-size); 365 | } 366 | 367 | /* Use the general admonition font size for inline code */ 368 | .admonition p code.literal { 369 | font-size: var(--admonition-font-size); 370 | } 371 | 372 | .highlight .s, .highlight .s1, .highlight .s2 { 373 | color: #3F8100; 374 | } 375 | 376 | .highlight .o { 377 | color: #BB5400; 378 | } 379 | -------------------------------------------------------------------------------- /awspub/s3.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import hashlib 3 | import logging 4 | import os 5 | from typing import Dict 6 | 7 | import boto3 8 | from mypy_boto3_s3.type_defs import CompletedPartTypeDef 9 | 10 | from awspub.context import Context 11 | from awspub.exceptions import BucketDoesNotExistException 12 | 13 | # chunk size is required for calculating the checksums 14 | MULTIPART_CHUNK_SIZE = 8 * 1024 * 1024 15 | 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | class S3: 21 | """ 22 | Handle S3 API interaction 23 | """ 24 | 25 | def __init__(self, context: Context): 26 | """ 27 | :param context: 28 | "type context: awspub.context.Context 29 | """ 30 | self._ctx: Context = context 31 | self._s3client = boto3.client("s3") 32 | self._bucket_region = None 33 | 34 | 
@property 35 | def bucket_region(self): 36 | if not self._bucket_region: 37 | if not self._bucket_exists(): 38 | raise BucketDoesNotExistException(self.bucket_name) 39 | self._bucket_region = self._s3client.head_bucket(Bucket=self.bucket_name)["BucketRegion"] 40 | 41 | return self._bucket_region 42 | 43 | @property 44 | def bucket_name(self): 45 | return self._ctx.conf["s3"]["bucket_name"] 46 | 47 | def __repr__(self): 48 | return ( 49 | f"<{self.__class__} bucket:'{self.bucket_name}' " 50 | f"region:'{self.bucket_region} key:{self._ctx.source_sha256}'>" 51 | ) 52 | 53 | def _multipart_sha256sum(self, file_path: str) -> str: 54 | """ 55 | Calculate the sha256 checksum like AWS does it (in a multipart upload) per chunk 56 | See https://docs.aws.amazon.com/AmazonS3/latest/userguide/checking-object-integrity.html#large-object-checksums 57 | 58 | :param file_path: the path to the local file to upload 59 | :type file_path: str 60 | """ 61 | sha256_list = [] 62 | count = 0 63 | with open(file_path, "rb") as f: 64 | for chunk in iter(lambda: f.read(MULTIPART_CHUNK_SIZE), b""): 65 | sha256_list.append(hashlib.sha256(chunk)) 66 | count += 1 67 | 68 | sha256_list_digest_concatenated = b"".join([s.digest() for s in sha256_list]) 69 | sha256_b64 = base64.b64encode(hashlib.sha256(sha256_list_digest_concatenated).digest()) 70 | return f"{sha256_b64.decode('ascii')}-{count}" 71 | 72 | def _bucket_exists(self) -> bool: 73 | """ 74 | Check if the S3 bucket from context exists 75 | 76 | :return: True if the bucket exists, otherwise False 77 | :rtype: bool 78 | """ 79 | resp = self._s3client.list_buckets() 80 | return self.bucket_name in [b["Name"] for b in resp["Buckets"]] 81 | 82 | def upload_file(self, source_path: str): 83 | """ 84 | Upload a given file to the bucket from context. The key name will be the sha256sum hexdigest of the file. 
85 | If a file with that name already exist in the given bucket and the calculated sha256sum matches 86 | the sha256sum from S3, nothing will be uploaded. Instead the existing file will be used. 87 | This method does use a multipart upload internally so an upload can be retriggered in case 88 | of errors and the previously uploaded content will be reused. 89 | Note: be aware that failed multipart uploads are not deleted. So it's recommended to setup 90 | a bucket lifecycle rule to delete incomplete multipart uploads. 91 | See https://docs.aws.amazon.com/AmazonS3/latest/userguide//mpu-abort-incomplete-mpu-lifecycle-config.html 92 | 93 | :param source_path: the path to the local file to upload (usually a .vmdk file) 94 | :type source_path: str 95 | """ 96 | # make sure the bucket exists 97 | if not self._bucket_exists(): 98 | raise BucketDoesNotExistException(self.bucket_name) 99 | 100 | s3_sha256sum = self._multipart_sha256sum(source_path) 101 | 102 | try: 103 | # check if the key exists already in the bucket and if so, if the multipart upload 104 | # sha256sum does match 105 | head = self._s3client.head_object( 106 | Bucket=self.bucket_name, Key=self._ctx.source_sha256, ChecksumMode="ENABLED" 107 | ) 108 | 109 | if head["ChecksumSHA256"] == s3_sha256sum: 110 | logger.info( 111 | f"'{self._ctx.source_sha256}' in bucket '{self.bucket_name}' " 112 | "already exists and sha256sum matches. nothing to upload to S3" 113 | ) 114 | return 115 | else: 116 | logger.warning( 117 | f"'{self._ctx.source_sha256}' in bucket '{self.bucket_name}' " 118 | f"already exists but sha256sum does not match. Will be overwritten ..." 
119 | ) 120 | except Exception: 121 | logging.debug(f"Can not find '{self._ctx.source_sha256}' in bucket '{self.bucket_name}'") 122 | 123 | # do the real upload 124 | self._upload_file_multipart(source_path, s3_sha256sum) 125 | 126 | def _get_multipart_upload_id(self) -> str: 127 | """ 128 | Get an existing or create a multipart upload id 129 | 130 | :return: a multipart upload id 131 | :rtype: str 132 | """ 133 | resp = self._s3client.list_multipart_uploads(Bucket=self.bucket_name) 134 | multipart_uploads = [ 135 | upload["UploadId"] for upload in resp.get("Uploads", []) if upload["Key"] == self._ctx.source_sha256 136 | ] 137 | if len(multipart_uploads) == 1: 138 | logger.info(f"found existing multipart upload '{multipart_uploads[0]}' for key '{self._ctx.source_sha256}'") 139 | return multipart_uploads[0] 140 | elif len(multipart_uploads) == 0: 141 | # create a new multipart upload 142 | resp_create = self._s3client.create_multipart_upload( 143 | Bucket=self.bucket_name, 144 | Key=self._ctx.source_sha256, 145 | ChecksumAlgorithm="SHA256", 146 | ACL="private", 147 | ) 148 | upload_id = resp_create["UploadId"] 149 | logger.info( 150 | f"new multipart upload (upload id: '{upload_id})' started in bucket " 151 | f"{self.bucket_name} for key {self._ctx.source_sha256}" 152 | ) 153 | # if there's an expire rule configured for that bucket, inform about it 154 | if resp_create.get("AbortDate"): 155 | logger.info( 156 | f"multipart upload '{upload_id}' will expire at " 157 | f"{resp_create['AbortDate']} (rule: {resp_create.get('AbortRuleId')})" 158 | ) 159 | else: 160 | logger.warning("there is no matching expire/lifecycle rule configured for incomplete multipart uploads") 161 | return upload_id 162 | else: 163 | # multiple multipart uploads for the same key available 164 | logger.warning( 165 | f"there are multiple ({len(multipart_uploads)}) multipart uploads ongoing in " 166 | f"bucket {self.bucket_name} for key {self._ctx.source_sha256}" 167 | ) 168 | 
logger.warning("using the first found multipart upload but you should delete pending multipart uploads") 169 | return multipart_uploads[0] 170 | 171 | def _upload_file_multipart(self, source_path: str, s3_sha256sum: str) -> None: 172 | """ 173 | Upload a given file to the bucket from context. The key name will be the sha256sum hexdigest of the file 174 | 175 | :param source_path: the path to the local file to upload (usually a .vmdk file) 176 | :type source_path: str 177 | :param s3_sha256sum: the sha256sum how S3 calculates it 178 | :type s3_sha256sum: str 179 | """ 180 | upload_id = self._get_multipart_upload_id() 181 | 182 | logger.info(f"using upload id '{upload_id}' for multipart upload of '{source_path}' ...") 183 | resp_list_parts = self._s3client.list_parts( 184 | Bucket=self.bucket_name, Key=self._ctx.source_sha256, UploadId=upload_id 185 | ) 186 | 187 | # sanity check for the used checksum algorithm 188 | if resp_list_parts["ChecksumAlgorithm"] != "SHA256": 189 | logger.error(f"available ongoing multipart upload '{upload_id}' does not use SHA256 as checksum algorithm") 190 | 191 | # already available parts 192 | parts_available = {p["PartNumber"]: p for p in resp_list_parts.get("Parts", [])} 193 | # keep a list of parts (either already available or created) required to complete the multipart upload 194 | parts: Dict[int, CompletedPartTypeDef] = {} 195 | parts_size_done: int = 0 196 | source_path_size: int = os.path.getsize(source_path) 197 | with open(source_path, "rb") as f: 198 | # parts start at 1 (not 0) 199 | for part_number, chunk in enumerate(iter(lambda: f.read(MULTIPART_CHUNK_SIZE), b""), start=1): 200 | # the sha256sum of the current part 201 | sha256_part = base64.b64encode(hashlib.sha256(chunk).digest()).decode("ascii") 202 | # do nothing if that part number already exist and the sha256sum matches 203 | if parts_available.get(part_number): 204 | if parts_available[part_number]["ChecksumSHA256"] == sha256_part: 205 | logger.info(f"part 
{part_number} already exists and sha256sum matches. continue") 206 | parts[part_number] = dict( 207 | PartNumber=part_number, 208 | ETag=parts_available[part_number]["ETag"], 209 | ChecksumSHA256=parts_available[part_number]["ChecksumSHA256"], 210 | ) 211 | parts_size_done += len(chunk) 212 | continue 213 | else: 214 | logger.info(f"part {part_number} already exists but will be overwritten") 215 | 216 | # upload a new part 217 | resp_upload_part = self._s3client.upload_part( 218 | Body=chunk, 219 | Bucket=self.bucket_name, 220 | ContentLength=len(chunk), 221 | ChecksumAlgorithm="SHA256", 222 | ChecksumSHA256=sha256_part, 223 | Key=self._ctx.source_sha256, 224 | PartNumber=part_number, 225 | UploadId=upload_id, 226 | ) 227 | parts_size_done += len(chunk) 228 | # add new part to the dict of parts 229 | parts[part_number] = dict( 230 | PartNumber=part_number, 231 | ETag=resp_upload_part["ETag"], 232 | ChecksumSHA256=sha256_part, 233 | ) 234 | logger.info( 235 | f"part {part_number} uploaded ({round(parts_size_done/source_path_size * 100, 2)}% " 236 | f"; {parts_size_done} / {source_path_size} bytes)" 237 | ) 238 | 239 | logger.info( 240 | f"finishing the multipart upload for key '{self._ctx.source_sha256}' in bucket {self.bucket_name} now ..." 
241 | ) 242 | # finish the multipart upload 243 | self._s3client.complete_multipart_upload( 244 | Bucket=self.bucket_name, 245 | Key=self._ctx.source_sha256, 246 | UploadId=upload_id, 247 | ChecksumSHA256=s3_sha256sum, 248 | MultipartUpload={"Parts": [value for key, value in parts.items()]}, 249 | ) 250 | logger.info( 251 | f"multipart upload finished and key '{self._ctx.source_sha256}' now " 252 | f"available in bucket '{self.bucket_name}'" 253 | ) 254 | 255 | # add tagging to the final s3 object 256 | self._s3client.put_object_tagging( 257 | Bucket=self.bucket_name, 258 | Key=self._ctx.source_sha256, 259 | Tagging={ 260 | "TagSet": self._ctx.tags, 261 | }, 262 | ) 263 | --------------------------------------------------------------------------------