├── docs ├── .sphinx │ ├── version │ ├── pa11y.json │ ├── .pymarkdown.json │ ├── metrics │ │ ├── source_metrics.sh │ │ └── build_metrics.py │ ├── get_vale_conf.py │ └── update_sp.py ├── requirements.txt ├── .gitignore ├── .pre-commit-config.yaml ├── .custom_wordlist.txt ├── index.md ├── _static │ ├── cookie-banner.css │ └── js │ │ └── bundle.js ├── _templates │ ├── header.html │ └── footer.html ├── about.md ├── products.md ├── Makefile └── conf.py ├── .github ├── pull_request_template.md └── workflows │ ├── cla-check.yml │ ├── automatic-doc-checks.yml │ ├── markdown-style-checks.yml │ ├── sphinx-python-dependency-build-checks.yml │ └── check-removed-urls.yml ├── LICENSE ├── README.md └── .readthedocs.yaml /docs/.sphinx/version: -------------------------------------------------------------------------------- 1 | 1.2.0 2 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | canonical-sphinx[full] 2 | packaging 3 | sphinxcontrib-svg2pdfconverter[CairoSVG] 4 | sphinx-last-updated-by-git 5 | sphinx-sitemap 6 | -------------------------------------------------------------------------------- /docs/.sphinx/pa11y.json: -------------------------------------------------------------------------------- 1 | { 2 | "chromeLaunchConfig": { 3 | "args": [ 4 | "--no-sandbox" 5 | ] 6 | }, 7 | "reporter": "cli", 8 | "standard": "WCAG2AA" 9 | } 10 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | - [ ] Have you updated `CHANGELOG.md` with relevant non-documentation file changes? 2 | - [ ] Have you updated the documentation for this change? 3 | 4 | ----- 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2025 Canonical Ltd. 2 | 3 | This work is licensed under the Creative Commons Attribution-Share Alike 3.0 4 | Unported License. To view a copy of this license, visit 5 | http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to Creative 6 | Commons, 171 Second Street, Suite 300, San Francisco, California, 94105, USA. 
-------------------------------------------------------------------------------- /.github/workflows/cla-check.yml: -------------------------------------------------------------------------------- 1 | # This workflow checks if the contributor has signed the Canonical Contributor Licence Agreement (CLA) 2 | name: Canonical Contributor Licence Agreement check 3 | 4 | on: 5 | pull_request: 6 | branches: [main] 7 | 8 | jobs: 9 | cla-check: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check if CLA signed 13 | uses: canonical/has-signed-canonical-cla@v2 14 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | # Environment 2 | *env*/ 3 | .sphinx/venv/ 4 | 5 | # Sphinx 6 | .sphinx/warnings.txt 7 | .sphinx/.wordlist.dic 8 | .sphinx/.doctrees/ 9 | .sphinx/update/ 10 | .sphinx/node_modules/ 11 | 12 | # Vale 13 | .sphinx/styles/* 14 | .sphinx/vale.ini 15 | 16 | # Build outputs 17 | _build 18 | 19 | # Node.js 20 | package*.json 21 | 22 | # Unrelated cache and config files 23 | .DS_Store 24 | __pycache__ 25 | .idea/ 26 | .vscode/ 27 | -------------------------------------------------------------------------------- /.github/workflows/automatic-doc-checks.yml: -------------------------------------------------------------------------------- 1 | # 2 | name: Automatic doc checks 3 | 4 | on: 5 | push: 6 | branches: [ main ] 7 | pull_request: 8 | paths: 9 | - 'docs/**' # Only run on changes to the docs directory 10 | 11 | workflow_dispatch: 12 | # Manual trigger 13 | 14 | 15 | concurrency: 16 | group: ${{ github.workflow }}-${{ github.ref }} 17 | cancel-in-progress: true 18 | 19 | jobs: 20 | documentation-checks: 21 | uses: canonical/documentation-workflows/.github/workflows/documentation-checks.yaml@main 22 | with: 23 | working-directory: "docs" 24 | fetch-depth: 0 25 | -------------------------------------------------------------------------------- /.github/workflows/markdown-style-checks.yml: -------------------------------------------------------------------------------- 1 | name: Markdown style checks 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - 'docs/**' # Only run on changes to the docs directory 9 | pull_request: 10 | branches: 11 | - '*' 12 | paths: 13 | - 'docs/**' # Only run on changes to the docs directory 14 | 15 | jobs: 16 | markdown-lint: 17 | runs-on: ubuntu-22.04 18 | steps: 19 | - uses: actions/checkout@v4 20 | with: 21 | fetch-depth: 0 22 | - name: Create venv 23 | working-directory: "docs" 24 | run: make install 25 | - name: Lint markdown 26 | working-directory: "docs" 27 | run: make lint-md 28 | -------------------------------------------------------------------------------- /docs/.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: make-spelling 5 | name: Run make spelling 6 | entry: make -C docs spelling 7 | language: system 8 | pass_filenames: false 9 | files: ^docs/.*\.(rst|md|txt)$ 10 | 11 | - id: make-linkcheck 12 | name: Run make linkcheck 13 | entry: make -C docs linkcheck 14 | language: system 15 | pass_filenames: false 16 | files: ^docs/.*\.(rst|md|txt)$ 17 | 18 | - id: make-woke 19 | name: Run make woke 20 | entry: make -C docs woke 21 | language: system 22 | pass_filenames: false 23 | files: ^docs/.*\.(rst|md|txt)$ 24 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # Air-gapped documentation at Canonical 2 | 3 | Key concepts about air-gapped environments and links to product-specific air-gapped documentation at Canonical. 4 | 5 | ## Contributing 6 | 7 | We welcome contributions to this project! If you have suggestions, bug fixes, or improvements, please open an issue or submit a pull request. 8 | 9 | Please read and sign our [Contributor License Agreement (CLA)](https://ubuntu.com/legal/contributors) before submitting any changes. The agreement grants Canonical permission to use your contributions. The author of a change remains the copyright owner of their code (no copyright assignment occurs). 10 | 11 | 12 | 13 | [Sphinx]: https://www.sphinx-doc.org/ 14 | [Contributor Licence Agreement (CLA)]: https://ubuntu.com/legal/contributors 15 | -------------------------------------------------------------------------------- /docs/.custom_wordlist.txt: -------------------------------------------------------------------------------- 1 | # Leave a blank line at the end of this file to support concatenation 2 | airgap 3 | airgapped 4 | backend 5 | backends 6 | Charmcraft 7 | cjk 8 | cryptographically 9 | dvipng 10 | fonts 11 | freefont 12 | github 13 | GPG 14 | gyre 15 | https 16 | html 17 | io 18 | Intersphinx 19 | lang 20 | LaTeX 21 | latexmk 22 | Multipass 23 | otf 24 | plantuml 25 | PNG 26 | Pygments 27 | pymarkdown 28 | QEMU 29 | Rockcraft 30 | readthedocs 31 | rst 32 | sitemapindex 33 | subproject 34 | subprojects 35 | SVG 36 | tex 37 | texlive 38 | TOC 39 | toctree 40 | txt 41 | uncommenting 42 | utils 43 | VMs 44 | WCAG 45 | whitespace 46 | whitespaces 47 | wordlist 48 | xetex 49 | xindy 50 | xml 51 | ip 52 | spread_test_example 53 | Furo 54 | PDF 55 | Open Graph 56 | MyST 57 | YouTube 58 | reStructuredText 59 | GitHub 60 | Sphinx 61 | URL 62 | PR 63 | Read the Docs 64 | Spread 65 | landscape 66 | lastmod 67 | yaml 68 | -------------------------------------------------------------------------------- /docs/.sphinx/.pymarkdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": { 3 | "selectively_enable_rules": true, 4 | "heading-style": { 5 | "enabled": true, 6 | "style": "atx" 7 | }, 8 | "commands-show-output": { 9 | "enabled": true 10 | }, 11 | "no-missing-space-atx": { 12 | "enabled": true 13 | }, 14 | "blanks-around-headings": { 15 | "enabled": false 16 | }, 17 | "heading-start-left": { 18 | "enabled": true 19 | }, 20 | "no-trailing-punctuation": { 21 | "enabled": true, 22 | "punctuation": ".,;。,;" 23 | }, 24 | "blanks-around-fences": { 25 | "enabled": true, 26 | "list_items": false 27 | }, 28 | "blanks-around-lists": { 29 | "enabled": true 30 | }, 31 | "hr-style": { 32 | "enabled": true 33 | }, 34 | "no-empty-links": { 35 | "enabled": true 36 | }, 37 | "no-alt-text": { 38 | "enabled": true 39 | } 40 | }, 41 | "extensions": { 42 | "front-matter" : { 43 | "enabled" : true 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.11" 13 | jobs: 14 | post_checkout: 15 
| - git fetch --unshallow || true 16 | # Cancel building pull requests when there aren't changes in the docs directory. 17 | # If there are no changes (git diff exits with 0) we force the command to return with 183. 18 | # This is a special exit code on Read the Docs that will cancel the build immediately. 19 | # https://docs.readthedocs.io/en/stable/build-customization.html#cancel-build-based-on-a-condition 20 | - | 21 | if [ "$READTHEDOCS_VERSION_TYPE" = "external" ] && git diff --quiet origin/main -- 'docs/' '.readthedocs.yaml'; 22 | then 23 | exit 183; 24 | fi 25 | 26 | # Build documentation in the docs/ directory with Sphinx 27 | sphinx: 28 | builder: dirhtml 29 | configuration: docs/conf.py 30 | fail_on_warning: true 31 | 32 | # If using Sphinx, optionally build your docs in additional formats such as PDF 33 | formats: 34 | - pdf 35 | 36 | # Optionally declare the Python requirements required to build your docs 37 | python: 38 | install: 39 | - requirements: docs/requirements.txt 40 | -------------------------------------------------------------------------------- /.github/workflows/sphinx-python-dependency-build-checks.yml: -------------------------------------------------------------------------------- 1 | # The purpose of this workflow file is to confirm that the Sphinx 2 | # virtual environment can be built from source, consequently documenting 3 | # the packages required in the build environment to do that. 4 | # 5 | # This is needed because some projects embed the documentation into built 6 | # artifacts, which involves rendering the documentation on the target 7 | # architecture. 8 | # 9 | # Depending on the architecture, pip may or may not have already built wheels 10 | # available, and as such we need to make sure building wheels from source can 11 | # succeed.
12 | name: Sphinx python dependency build checks 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | workflow_dispatch: # manual trigger 19 | 20 | concurrency: 21 | group: ${{ github.workflow }}-${{ github.ref }} 22 | cancel-in-progress: true 23 | 24 | jobs: 25 | build: 26 | name: build 27 | runs-on: ubuntu-latest 28 | steps: 29 | - name: Checkout code 30 | uses: actions/checkout@v4 31 | 32 | - name: Install dependencies 33 | run: | 34 | set -ex 35 | sudo apt-get --fix-missing update 36 | sudo apt -y install \ 37 | cargo \ 38 | libpython3-dev \ 39 | libxml2-dev \ 40 | libxslt1-dev \ 41 | make \ 42 | python3-venv \ 43 | rustc \ 44 | libtiff5-dev libjpeg8-dev libopenjp2-7-dev zlib1g-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev python3-tk libharfbuzz-dev libfribidi-dev libxcb1-dev 45 | - name: Build Sphinx venv 46 | working-directory: "docs" 47 | run: | 48 | set -ex 49 | make install \ 50 | PIPOPTS="--no-binary :all:" \ 51 | || ( cat .sphinx/venv/pip_install.log && exit 1 ) 52 | -------------------------------------------------------------------------------- /.github/workflows/check-removed-urls.yml: -------------------------------------------------------------------------------- 1 | name: Check for removed URLs 2 | 3 | on: 4 | pull_request: 5 | branches: [main] 6 | 7 | jobs: 8 | build-docs: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout PR branch 12 | uses: actions/checkout@v4 13 | with: 14 | repository: ${{ github.event.pull_request.head.repo.full_name }} 15 | ref: ${{ github.event.pull_request.head.ref }} 16 | fetch-depth: 0 17 | path: compare 18 | - name: Checkout base branch 19 | uses: actions/checkout@v4 20 | with: 21 | ref: ${{ github.event.pull_request.base.ref }} 22 | repository: ${{ github.event.pull_request.base.repo.full_name }} 23 | fetch-depth: 0 24 | path: base 25 | - uses: actions/setup-python@v5 26 | - name: Build docs 27 | run: | 28 | for dir in compare base; do 29 | pushd ${dir}/docs 30 | make install 31 | . .sphinx/venv/bin/activate 32 | make html 33 | popd 34 | done 35 | - name: Generate current URLs list 36 | run: | 37 | for dir in compare base; do 38 | pushd ${dir}/docs 39 | find ./_build/ -name '*.html' \ 40 | | sed 's|/_build||;s|/index.html$|/|;s|.html$||' \ 41 | | sort > urls.txt 42 | popd 43 | done 44 | - name: Compare URLs 45 | run: | 46 | BASE_URLS_PATH="base/docs/urls.txt" 47 | COMPARE_URLS_PATH="compare/docs/urls.txt" 48 | removed=$(comm -23 ${BASE_URLS_PATH} ${COMPARE_URLS_PATH} ) 49 | if [ -n "$removed" ]; then 50 | echo "The following URLs were removed:" 51 | echo "$removed" 52 | echo "Please ensure removed pages are redirected" 53 | exit 1 54 | fi -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description": "Learn about key airgapped concepts and product-specific airgapped documentation at Canonical, including Ubuntu Pro, Landscape, Juju, and MAAS." 5 | --- 6 | 7 | # Airgapped documentation at Canonical 8 | 9 | Airgapped environments are offline from the public internet, either through strict network controls or full physical isolation. These environments can be complex, and are often used to meet certain security, compliance, or operational requirements. 10 | 11 | Canonical provides tools and guidance to support deploying and maintaining Ubuntu and other Canonical products in airgapped environments. 
This documentation set explains key concepts, types of airgaps, and considerations for planning airgapped deployments. It also links to product-specific documentation. 12 | 13 | This documentation is intended for administrators, security teams, and engineers who manage systems in any type of airgapped environment. 14 | 15 | --- 16 | 17 | ## In this documentation 18 | 19 | ````{grid} 1 20 | 21 | ```{grid-item-card} [About airgapped environments](about) 22 | 23 | **Key concepts**: Understand key concepts about airgaps 24 | ``` 25 | 26 | ```{grid-item-card} [Airgapped documentation for Canonical products](products) 27 | 28 | **Product documentation**: Links to airgapped documentation for specific products 29 | ``` 30 | 31 | ```` 32 | 33 | ## Project and community 34 | 35 | This documentation set warmly welcomes community contributions, suggestions, fixes, and constructive feedback. 36 | 37 | ### Get involved 38 | 39 | - [Get support](https://ubuntu.com/support/community-support) 40 | - [Join the Discourse forum](https://discourse.ubuntu.com/) 41 | 42 | ### Governance and policies 43 | 44 | - [Code of conduct](https://ubuntu.com/community/docs/ethos/code-of-conduct) 45 | 46 | ```{toctree} 47 | :hidden: 48 | :maxdepth: 2 49 | 50 | Home <self> 51 | about 52 | Product-specific documentation <products> 53 | ``` 54 | -------------------------------------------------------------------------------- /docs/_static/cookie-banner.css: -------------------------------------------------------------------------------- 1 | /* Cookie policy styling; WILL BE REMOVED when the new Vanilla-based theme is implemented */ 2 | .cookie-policy { 3 | overflow: auto; 4 | top: 35%; 5 | z-index: 50; 6 | position: fixed; 7 | } 8 | 9 | dialog.cookie-policy { 10 | background-color: var(--color-code-background); 11 | color: var(--color-code-foreground); 12 | height: auto; 13 | max-height: 60vh; 14 | max-width: 40rem; 15 | padding: 0 1rem 0 1rem; 16 | width: auto; 17 | } 18 | 19 | header.p-modal__header { 20 | margin-bottom: .5rem; 21 | } 22 | 23 | header.p-modal__header::after { 24 | background-color: #d9d9d9; 25 | content: ""; 26 | height: 1px; 27 | left: 0; 28 | margin-left: 1rem; 29 | margin-right: 1rem; 30 | position: absolute; 31 | right: 0; 32 | } 33 | 34 | h2#cookie-policy-title.p-modal__title { 35 | align-self: flex-end; 36 | font-size: 1.5rem; 37 | font-style: normal; 38 | font-weight: 275; 39 | line-height: 2rem; 40 | margin: 0 0 1.05rem 0; 41 | padding: 0.45rem 0 0 0; 42 | } 43 | 44 | .cookie-policy p { 45 | font-size: 1rem; 46 | line-height: 1.5rem; 47 | margin-top: 0; 48 | padding-top: .4rem; 49 | } 50 | 51 | .cookie-policy p a { 52 | text-decoration: none; 53 | color: var(--color-link); 54 | } 55 | .cookie-policy button { 56 | border-style: solid; 57 | border-width: 1.5px; 58 | cursor: pointer; 59 | display: inline-block; 60 | font-size: 1rem; 61 | font-weight: 400; 62 | justify-content: center; 63 | line-height: 1.5rem; 64 | padding: calc(.4rem - 1px) 1rem; 65 | text-align: center; 66 | text-decoration: none; 67 | transition-duration: .1s; 68 | transition-property: background-color,border-color; 69 | transition-timing-function: cubic-bezier(0.55,0.055,0.675,0.19); 70 | } 71 | 72 | .cookie-policy button { 73 | background-color: #fff; 74 | border-color: rgba(0,0,0,0.56); 75 | color: #000; 76 | } 77 | 78 | .cookie-policy .p-button--positive { 79 | background-color: #0e8420; 80 | border-color: #0e8420; 81 | color: #fff; 82 | } 83 | --------------------------------------------------------------------------------
/docs/_templates/header.html: -------------------------------------------------------------------------------- 1 | 73 | -------------------------------------------------------------------------------- /docs/.sphinx/metrics/source_metrics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # shellcheck disable=all 3 | 4 | VENV=".sphinx/venv/bin/activate" 5 | 6 | files=0 7 | words=0 8 | readabilityWords=0 9 | readabilitySentences=0 10 | readabilitySyllables=0 11 | readabilityAverage=0 12 | readable=true 13 | 14 | # measure number of files (.rst and .md), excluding those in .sphinx dir 15 | files=$(find . -type d -path './.sphinx' -prune -o -type f \( -name '*.md' -o -name '*.rst' \) -print | wc -l) 16 | 17 | # calculate metrics only if source files are present 18 | if [ "$files" -eq 0 ]; then 19 | echo "There are no source files to calculate metrics" 20 | else 21 | # measure raw total number of words, excluding those in .sphinx dir 22 | words=$(find . -type d -path './.sphinx' -prune -o \( -name '*.md' -o -name '*.rst' \) -exec cat {} + | wc -w) 23 | 24 | # accumulate readability metrics across source files 25 | echo "Activating virtual environment to run vale..." 26 | source "${VENV}" 27 | 28 | for file in *.md *.rst; do 29 | if [ -f "$file" ]; then 30 | readabilityWords=$((readabilityWords + $(vale ls-metrics "$file" | grep '"words"' | sed 's/[^0-9]*//g'))) 31 | readabilitySentences=$((readabilitySentences + $(vale ls-metrics "$file" | grep '"sentences"' | sed 's/[^0-9]*//g'))) 32 | readabilitySyllables=$((readabilitySyllables + $(vale ls-metrics "$file" | grep '"syllables"' | sed 's/[^0-9]*//g'))) 33 | fi 34 | done 35 | 36 | echo "Deactivating virtual environment..." 37 | deactivate 38 | 39 | # calculate mean number of words per file 40 | if [ "$files" -ge 1 ]; then 41 | meanval=$((readabilityWords / files)) 42 | else 43 | meanval=$readabilityWords 44 | fi 45 | 46 | readabilityAverage=$(echo "scale=2; 0.39 * ($readabilityWords / $readabilitySentences) + (11.8 * ($readabilitySyllables / $readabilityWords)) - 15.59" | bc) # Flesch-Kincaid grade level 47 | 48 | # cast average to int for comparison 49 | readabilityAverageInt=$(echo "$readabilityAverage / 1" | bc) 50 | 51 | # value below 8 is considered readable 52 | if [ "$readabilityAverageInt" -lt 8 ]; then 53 | readable=true 54 | else 55 | readable=false 56 | fi 57 | 58 | # summarise latest metrics 59 | echo "Summarising metrics for source files (.md, .rst)..." 60 | echo -e "\ttotal files: $files" 61 | echo -e "\ttotal words (raw): $words" 62 | echo -e "\ttotal words (prose): $readabilityWords" 63 | echo -e "\taverage word count: $meanval" 64 | echo -e "\treadability: $readabilityAverage" 65 | echo -e "\treadable: $readable" 66 | fi 67 | -------------------------------------------------------------------------------- /docs/about.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description": "Learn key concepts about airgapped environments, including the types of airgaps and deployment considerations." 5 | --- 6 | 7 | (about-top)= 8 | # About airgapped environments 9 | 10 | An airgapped environment is one in which systems are isolated from the public internet and, in some cases, from all external networks. This isolation prevents direct access to external services, which requires special considerations for installing, updating, and operating software. Many organizations use airgapped environments to meet security, compliance, or operational requirements.
11 | 12 | (about-types)= 13 | ## Types of airgapped environments 14 | 15 | There are two main types of airgapped environments: restricted network airgaps and fully disconnected airgaps. 16 | 17 | ### Restricted network airgap (controlled endpoint) 18 | 19 | In this model, the systems inside the airgapped environment don’t have direct access to the public internet. Instead, they can communicate with a designated external system by connecting through a controlled endpoint, such as a proxy server, gateway host, or firewall with allow-listed rules. 20 | 21 | This setup allows updates and data to flow in securely without exposing systems inside the environment to the internet. Since data can be synchronized through the controlled connection, this approach reduces the need for physical data transfers while still maintaining a strong security boundary. 22 | 23 | This type of airgapped environment is used when full isolation isn’t required, but tight control over external communication is still necessary. A minimal proxy-configuration sketch is included at the end of this page. 24 | 25 | ### Fully disconnected airgap (manual data transfer) 26 | 27 | In this model, the systems inside the environment have no network connectivity to external systems or the public internet. All updates and data must be brought in manually, typically using removable storage or other physical media. 28 | 29 | This lack of any external connectivity provides the strongest network separation, but requires more careful planning to keep systems up to date. Administrators must regularly prepare and manually transfer updates into the environment; a minimal sketch of such a transfer is included at the end of this page. 30 | 31 | This type of airgapped environment is used when the highest level of network isolation is required. It’s also common in physically isolated or remote environments where connectivity is impractical. 32 | 33 | (about-considerations)= 34 | ## Considerations 35 | 36 | When planning to deploy Canonical products in an airgapped environment, keep in mind: 37 | 38 | - **Software updates** 39 | 40 | Updates (security patches, bug fixes, and new features) must be periodically mirrored or exported and imported into the environment. Depending on your deployment, this may involve tools such as [Landscape](https://ubuntu.com/landscape), [Enterprise Store](https://snapcraft.io/enterprise-store), repository mirrors, and container registries. 41 | 42 | - **Repository mirrors** 43 | 44 | Many products rely on external package repositories (e.g., APT, snaps, charm stores). You may need to set up internal mirrors or bring snapshots of these repositories into the environment. 45 | 46 | - **Internal networking** 47 | 48 | Even without access to the public internet, internal systems still need reliable network access to each other. Administrators should consult specific product documentation for any special networking requirements.
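To make the restricted network model concrete, here is a minimal sketch of pointing a host at a controlled endpoint rather than the open internet. It assumes a hypothetical proxy at `proxy.internal.example:3128`; the address is a placeholder, and individual Canonical products may have their own proxy settings, so consult their documentation.

```bash
# Hypothetical controlled endpoint; replace with your environment's proxy.
PROXY="http://proxy.internal.example:3128"

# Route APT traffic through the proxy (apt.conf syntax).
echo "Acquire::http::Proxy \"$PROXY\";"  | sudo tee /etc/apt/apt.conf.d/95proxy
echo "Acquire::https::Proxy \"$PROXY\";" | sudo tee -a /etc/apt/apt.conf.d/95proxy

# Route snapd traffic through the same proxy.
sudo snap set system proxy.http="$PROXY"
sudo snap set system proxy.https="$PROXY"
```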
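For the fully disconnected model, the following sketch moves a single snap across the gap on removable media. The snap name (`hello-world`) and the revision in the file names are placeholders; `snap download` prints the actual names of the files it creates.

```bash
# On a connected machine: download the snap and its signed assertions.
snap download hello-world
# This produces two files, for example:
#   hello-world_29.snap  hello-world_29.assert

# Copy both files to removable media and carry them across the airgap.

# On the airgapped machine: import the assertions, then install the snap.
# Acknowledging the assertion first lets snapd verify the package
# cryptographically, so the --dangerous flag is not needed.
sudo snap ack hello-world_29.assert
sudo snap install hello-world_29.snap
```

The same pattern (fetch and verify on a connected host, then transfer on physical media) applies to APT packages and container images, using the tools linked from the product pages in this documentation set.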
-------------------------------------------------------------------------------- /docs/.sphinx/metrics/build_metrics.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import sys 4 | import argparse 5 | from pathlib import Path 6 | from html.parser import HTMLParser 7 | from urllib.parse import urlsplit 8 | 9 | 10 | class MetricsParser(HTMLParser): 11 | def __init__(self): 12 | super().__init__() 13 | self.int_link_count = 0 14 | self.ext_link_count = 0 15 | self.fragment_count = 0 16 | self.image_count = 0 17 | self.in_object = 0 18 | 19 | @property 20 | def link_count(self): 21 | return self.fragment_count + self.int_link_count + self.ext_link_count 22 | 23 | def read(self, file): 24 | """ 25 | Read *file* (a file-like object with a ``read`` method returning 26 | strings) a chunk at a time, feeding each chunk to the parser. 27 | """ 28 | # Ensure the parser state is reset before each file (just in case 29 | # there's an erroneous dangling <object>) 30 | self.reset() 31 | self.in_object = 0 32 | buf = '' 33 | while True: 34 | # Parse 1MB chunks at a time 35 | buf = file.read(1024**2) 36 | if not buf: 37 | break 38 | self.feed(buf) 39 | 40 | def handle_starttag(self, tag, attrs): 41 | """ 42 | Count <a>, <img>, and <object> tags to determine the number of internal 43 | and external links, and the number of images. 44 | """ 45 | attrs = dict(attrs) 46 | if tag == 'a' and 'href' in attrs: 47 | # If there's no href, it's an anchor; if there's no hostname 48 | # (netloc) or path, it's just a fragment link within the page 49 | url = urlsplit(attrs['href']) 50 | if url.netloc: 51 | self.ext_link_count += 1 52 | elif url.path: 53 | self.int_link_count += 1 54 | else: 55 | self.fragment_count += 1 56 | elif tag == 'object': 57 | # <object> tags are a bit complex as they nest to offer fallbacks 58 | # and may contain an <img> fallback. We only want to count the 59 | # outer-most <object> in this case 60 | if self.in_object == 0: 61 | self.image_count += 1 62 | self.in_object += 1 63 | elif tag == 'img' and self.in_object == 0: 64 | self.image_count += 1 65 | 66 | def handle_endtag(self, tag): 67 | if tag == 'object': 68 | # Never let in_object be negative 69 | self.in_object = max(0, self.in_object - 1) 70 | 71 | 72 | def main(args=None): 73 | parser = argparse.ArgumentParser() 74 | parser.add_argument( 75 | 'build_dir', metavar='build-dir', nargs='?', default='.', 76 | help="The directory to scan for HTML files") 77 | config = parser.parse_args(args) 78 | 79 | parser = MetricsParser() 80 | for path in Path(config.build_dir).rglob('*.html'): 81 | with path.open('r', encoding='utf-8', errors='replace') as f: 82 | parser.read(f) 83 | 84 | print('Summarising metrics for build files (.html)...') 85 | print(f'\tlinks: {parser.link_count} (' 86 | f'{parser.fragment_count} #frag…, ' 87 | f'{parser.int_link_count} /int…, ' 88 | f'{parser.ext_link_count} https://ext…' 89 | ')') 90 | print(f'\timages: {parser.image_count}') 91 | 92 | 93 | if __name__ == '__main__': 94 | sys.exit(main()) 95 | -------------------------------------------------------------------------------- /docs/_templates/footer.html: -------------------------------------------------------------------------------- 1 | 38 |
39 |
40 | {%- if show_copyright %} 41 | 52 | {%- endif %} 53 | 54 | {# mod: removed "Made with" #} 55 | 56 | {%- if last_updated -%} 57 |
58 | {% trans last_updated=last_updated|e -%} 59 | Last updated on {{ last_updated }} 60 | {%- endtrans -%} 61 |
62 | {%- endif %} 63 | 64 | {%- if show_source and has_source and sourcename %} 65 |
66 | Show source 68 |
69 | {%- endif %} 70 |
71 |
72 | {% if has_contributor_listing and display_contributors and pagename and page_source_suffix %} 73 | {% set contributors = get_contributors_for_file(pagename, page_source_suffix) %} 74 | {% if contributors %} 75 | {% if contributors | length > 1 %} 76 | Thanks to the {{ contributors |length }} contributors! 77 | {% else %} 78 | Thanks to our contributor! 79 | {% endif %} 80 |
81 |
    82 | {% for contributor in contributors %} 83 |
  • 84 | {{ contributor[0] }} 85 |
  • 86 | {% endfor %} 87 |
88 | {% endif %} 89 | {% endif %} 90 |
91 | 92 |
-------------------------------------------------------------------------------- /docs/products.md: -------------------------------------------------------------------------------- 1 | --- 2 | myst: 3 | html_meta: 4 | "description": "This page lists links to airgapped documentation for Canonical products within the respective documentation sets, such as Ubuntu Pro, Landscape, Juju, and more." 5 | --- 6 | 7 | (products-top)= 8 | # Airgapped documentation for specific products 9 | 10 | Find product-specific documentation regarding airgapped environments below. 11 | 12 | If a product does not appear in this list, it only means that no airgapped documentation is available for it yet. It might still be possible to use the product in an airgapped environment. 13 | 14 | Note that any links in our [Support Portal](https://support-portal.canonical.com/) require a [Canonical Support contract](https://documentation.ubuntu.com/pro/support-overview/) to view. 15 | 16 | (products-pro-and-client)= 17 | ## Ubuntu Pro and Pro Client 18 | 19 | (products-pro)= 20 | ### Ubuntu Pro 21 | 22 | - [Ubuntu Pro for {spellexception}`airgapped` environments](https://documentation.ubuntu.com/pro/airgapped-setup/) 23 | - (Support Portal) [Get started with Ubuntu Pro in an airgapped environment](https://support-portal.canonical.com/knowledge-base/Get-Started-With-Ubuntu-Pro-in-an-Airgapped-Environment) 24 | 25 | (products-pro-client)= 26 | ### Ubuntu Pro Client 27 | 28 | - [Using Ubuntu Pro Client airgapped](https://documentation.ubuntu.com/pro-client/en/v32/explanations/using_pro_offline/) 29 | 30 | (products-pro-services)= 31 | ## Pro Services 32 | 33 | (products-security-standards)= 34 | ### ESM, FIPS, and other security standards 35 | 36 | - (Support Portal) [Enabling FIPS/FIPS-Updates in an offline environment](https://support-portal.canonical.com/knowledge-base/Install-and-enable-fips-in-an-offline-environment) 37 | 38 | (products-landscape)= 39 | ### Landscape 40 | 41 | - [Install Landscape in an airgapped environment](https://documentation.ubuntu.com/landscape/how-to-guides/landscape-installation-and-set-up/install-landscape-in-an-air-gapped-or-offline-environment/) 42 | - [Manage repositories in an airgapped environment](https://documentation.ubuntu.com/landscape/how-to-guides/repository-mirrors/manage-repositories-in-an-air-gapped-or-offline-environment/) 43 | 44 | (products-livepatch)= 45 | ### Livepatch 46 | 47 | - [Airgapped Livepatch on MicroK8s](https://ubuntu.com/security/livepatch/docs/livepatch_on_prem/tutorial/Getting%20started%20with%20air-gapped%20Livepatch%20and%20Microk8s) 48 | - [Airgapped Livepatch using Snaps](https://ubuntu.com/security/livepatch/docs/livepatch_on_prem/tutorial/Getting%20started%20with%20air-gapped%20Livepatch%20and%20Snap) 49 | - (Support Portal) [Livepatch on-premises deployment behind a web proxy](https://support-portal.canonical.com/knowledge-base/Livepatch-on-prem-deployment-behind-a-web-proxy) 50 | 51 | (products-core-airgapped)= 52 | ## Other core products for airgaps 53 | 54 | The following products are commonly used in airgapped environments, but are not part of the Ubuntu Pro services.
55 | 56 | (products-store)= 57 | ### Enterprise Store 58 | 59 | - [Getting started with an airgapped store](https://documentation.ubuntu.com/enterprise-store/main/tutorial/air-gapped-deployment/) 60 | - [Offline store (airgapped mode)](https://documentation.ubuntu.com/enterprise-store/main/how-to/airgap/) 61 | - [Offline Charmhub (airgapped mode)](https://documentation.ubuntu.com/enterprise-store/main/how-to/airgap-charmhub/) 62 | 63 | (products-juju)= 64 | ### Juju 65 | 66 | - [Set up your Juju deployment - offline](https://documentation.ubuntu.com/juju/latest/howto/manage-your-juju-deployment/set-up-your-juju-deployment-offline/) 67 | 68 | (products-maas)= 69 | ### MAAS 70 | 71 | - [How to set up airgapped MAAS](https://canonical.com/maas/docs/how-to-set-up-air-gapped-maas) 72 | 73 | (products-additional)= 74 | ## Additional products 75 | 76 | The following products can also be used in airgapped environments. 77 | 78 | (products-kubernetes)= 79 | ### Canonical Kubernetes 80 | 81 | - [Install Canonical Kubernetes in airgapped environments](https://documentation.ubuntu.com/canonical-kubernetes/latest/snap/howto/install/offline/) 82 | - [Configure a custom registry](https://documentation.ubuntu.com/canonical-kubernetes/latest/charm/howto/custom-registry/) 83 | 84 | (products-openstack)= 85 | ### Canonical OpenStack 86 | 87 | - [Airgapped and offline deployments](https://canonical-openstack.readthedocs-hosted.com/en/latest/explanation/design-considerations/#air-gapped-and-offline-deployments) 88 | 89 | (products-charmed-kubeflow)= 90 | ### Charmed Kubeflow 91 | 92 | - [Install in an airgapped environment](https://documentation.ubuntu.com/charmed-kubeflow/how-to/install/install-air-gapped/) 93 | 94 | (products-charmed-kubernetes)= 95 | ### Charmed Kubernetes 96 | 97 | - [Install Charmed Kubernetes offline](https://ubuntu.com/kubernetes/charmed-k8s/docs/install-offline) 98 | 99 | (products-charmed-mysql)= 100 | ### Charmed MySQL 101 | 102 | - [Deploy in an offline or airgapped environment](https://canonical-charmed-mysql.readthedocs-hosted.com/how-to/deploy/air-gapped/index.html) 103 | 104 | (products-charmed-postgresql)= 105 | ### Charmed PostgreSQL 106 | 107 | - [Deploy in an offline or airgapped environment](https://canonical-charmed-postgresql.readthedocs-hosted.com/14/how-to/deploy/air-gapped/) 108 | 109 | (products-microk8s)= 110 | ### MicroK8s 111 | 112 | - [Install MicroK8s in an offline or airgapped environment](https://microk8s.io/docs/install-offline) -------------------------------------------------------------------------------- /docs/.sphinx/get_vale_conf.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | 3 | import os 4 | import shutil 5 | import subprocess 6 | import tempfile 7 | import sys 8 | import logging 9 | import argparse 10 | 11 | # Configure logging 12 | logging.basicConfig( 13 | level=logging.INFO, 14 | format='%(asctime)s - %(levelname)s - %(message)s', 15 | datefmt='%Y-%m-%d %H:%M:%S' 16 | ) 17 | 18 | SPHINX_DIR = os.path.join(os.getcwd(), ".sphinx") 19 | 20 | GITHUB_REPO = "canonical/documentation-style-guide" 21 | GITHUB_CLONE_URL = f"https://github.com/{GITHUB_REPO}.git" 22 | 23 | # Source paths to copy from repo 24 | VALE_FILE_LIST = [ 25 | "styles/Canonical", 26 | "styles/config/vocabularies/Canonical", 27 | "styles/config/dictionaries", 28 | "vale.ini" 29 | ] 30 | 31 | def clone_repo_and_copy_paths(file_source_dest, overwrite=False): 32 | """ 33 | Clone the repository to a temporary directory and copy required files 34 | 35 | Args: 36 | file_source_dest: dictionary of file paths to copy from the repository, 37 | and their destination paths 38 | overwrite: boolean flag to overwrite existing files in the destination 39 | 40 | Returns: 41 | bool: True if all files were copied successfully, False otherwise 42 | """ 43 | 44 | if not file_source_dest: 45 | logging.error("No files to copy") 46 | return False 47 | 48 | # Create temporary directory on disk for cloning 49 | temp_dir = tempfile.mkdtemp() 50 | logging.info("Cloning repository <%s> to temporary directory: %s", GITHUB_REPO, temp_dir) 51 | clone_cmd = ["git", "clone", "--depth", "1", GITHUB_CLONE_URL, temp_dir] 52 | 53 | try: 54 | result = subprocess.run( 55 | clone_cmd, 56 | capture_output=True, 57 | text=True, 58 | check=True 59 | ) 60 | logging.debug("Git clone output: %s", result.stdout) 61 | except subprocess.CalledProcessError as e: 62 | logging.error("Git clone failed: %s", e.stderr) 63 | return False 64 | 65 | # Copy files from the cloned repository to the destination paths 66 | is_copy_success = True 67 | for source, dest in file_source_dest.items(): 68 | source_path = os.path.join(temp_dir, source) 69 | 70 | if not os.path.exists(source_path): 71 | is_copy_success = False 72 | logging.error("Source path not found: %s", source_path) 73 | continue 74 | 75 | if not copy_files_to_path(source_path, dest, overwrite): 76 | is_copy_success = False 77 | logging.error("Failed to copy %s to %s", source_path, dest) 78 | 79 | # Clean up temporary directory 80 | logging.info("Cleaning up temporary directory: %s", temp_dir) 81 | shutil.rmtree(temp_dir) 82 | 83 | return is_copy_success 84 | 85 | def copy_files_to_path(source_path, dest_path, overwrite=False): 86 | """ 87 | Copy a file or directory from source to destination 88 | 89 | Args: 90 | source_path: Path to the source file or directory 91 | dest_path: Path to the destination 92 | overwrite: Boolean flag to overwrite existing files in the destination 93 | 94 | Returns: 95 | bool: True if copy was successful, False otherwise 96 | """ 97 | # Skip if source file doesn't exist 98 | if not os.path.exists(source_path): 99 | logging.warning("Source path not found: %s", source_path) 100 | return False 101 | 102 | logging.info("Copying %s to %s", source_path, dest_path) 103 | # Handle existing files 104 | if os.path.exists(dest_path): 105 | if overwrite: 106 | logging.info(" Destination exists, overwriting: %s", dest_path) 107 | if os.path.isdir(dest_path): 108 | shutil.rmtree(dest_path) 109 | else: 110 | os.remove(dest_path) 111 | else: 112 | logging.info(" Destination exists, skip copying (use overwrite=True to replace): %s", 113 | dest_path) 114 | 
return True # Skip copying 115 | 116 | # Copy the source to destination 117 | try: 118 | if os.path.isdir(source_path): 119 | # entire directory 120 | shutil.copytree(source_path, dest_path) 121 | else: 122 | # individual files 123 | shutil.copy2(source_path, dest_path) 124 | return True 125 | except (shutil.Error, OSError) as e: 126 | logging.error("Copy failed: %s", e) 127 | return False 128 | 129 | def parse_arguments(): 130 | parser = argparse.ArgumentParser(description="Download Vale configuration files") 131 | parser.add_argument("--no-overwrite", action="store_true", help="Don't overwrite existing files") 132 | return parser.parse_args() 133 | 134 | def main(): 135 | # Define local directory paths 136 | vale_files_dict = {file: os.path.join(SPHINX_DIR, file) for file in VALE_FILE_LIST} 137 | 138 | # Parse command line arguments, default to overwrite_enabled = True 139 | overwrite_enabled = not parse_arguments().no_overwrite 140 | 141 | # Download into /tmp through git clone 142 | if not clone_repo_and_copy_paths(vale_files_dict, overwrite=overwrite_enabled): 143 | logging.error("Failed to download files from repository") 144 | return 1 145 | 146 | logging.info("Download complete") 147 | return 0 148 | 149 | 150 | if __name__ == "__main__": 151 | sys.exit(main()) # Keep return code 152 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | # Add your customisation to `Makefile` instead. 4 | 5 | # You can set these variables from the command line, and also 6 | # from the environment for the first two. 7 | SPHINXDIR = .sphinx 8 | SPHINXOPTS ?= -c . -d $(SPHINXDIR)/.doctrees -j auto 9 | SPHINXBUILD ?= $(VENVDIR)/bin/sphinx-build 10 | SOURCEDIR = . 11 | BUILDDIR = _build 12 | VENVDIR = $(SPHINXDIR)/venv 13 | PA11Y = $(SPHINXDIR)/node_modules/pa11y/bin/pa11y.js --config $(SPHINXDIR)/pa11y.json 14 | VENV = $(VENVDIR)/bin/activate 15 | TARGET = * 16 | ALLFILES = *.rst **/*.rst 17 | METRICSDIR = $(SOURCEDIR)/.sphinx/metrics 18 | REQPDFPACKS = latexmk fonts-freefont-otf texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended texlive-font-utils texlive-lang-cjk texlive-xetex plantuml xindy tex-gyre dvipng 19 | CONFIRM_SUDO ?= N 20 | VALE_CONFIG = $(SPHINXDIR)/vale.ini 21 | SPHINX_HOST ?= 127.0.0.1 22 | SPHINX_PORT ?= 8000 23 | 24 | # Put it first so that "make" without argument is like "make help". 
25 | help: 26 | @echo 27 | @echo "-------------------------------------------------------------" 28 | @echo "* watch, build and serve the documentation: make run" 29 | @echo "* only build: make html" 30 | @echo "* only serve: make serve" 31 | @echo "* clean built doc files: make clean-doc" 32 | @echo "* clean full environment: make clean" 33 | @echo "* check links: make linkcheck" 34 | @echo "* check markdown: make lint-md" 35 | @echo "* check spelling: make spelling" 36 | @echo "* check spelling (without building again): make spellcheck" 37 | @echo "* check inclusive language: make woke" 38 | @echo "* check accessibility: make pa11y" 39 | @echo "* check style guide compliance: make vale" 40 | @echo "* check style guide compliance on target: make vale TARGET=*" 41 | @echo "* check metrics for documentation: make allmetrics" 42 | @echo "* other possible targets: make <target>" 43 | @echo "-------------------------------------------------------------" 44 | @echo 45 | 46 | .PHONY: help full-help html epub pdf linkcheck spelling spellcheck woke \ 47 | vale pa11y run serve install pa11y-install \ 48 | vale-install pdf-prep pdf-prep-force clean clean-doc allmetrics \ 49 | update lint-md 50 | 51 | full-help: $(VENVDIR) 52 | @. $(VENV); $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 53 | @echo "\n\033[1;31mNOTE: This help text shows unsupported targets!\033[0m" 54 | @echo "Run 'make help' to see supported targets." 55 | 56 | # If requirements are updated, venv should be rebuilt and timestamped. 57 | $(VENVDIR): 58 | @echo "... setting up virtualenv" 59 | python3 -m venv $(VENVDIR) || { echo "You must install python3-venv before you can build the documentation."; exit 1; } 60 | . $(VENV); pip install $(PIPOPTS) --require-virtualenv \ 61 | --upgrade -r requirements.txt \ 62 | --log $(VENVDIR)/pip_install.log 63 | @test ! -f $(VENVDIR)/pip_list.txt || \ 64 | mv $(VENVDIR)/pip_list.txt $(VENVDIR)/pip_list.txt.bak 65 | @. $(VENV); pip list --local --format=freeze > $(VENVDIR)/pip_list.txt 66 | @touch $(VENVDIR) 67 | 68 | pa11y-install: 69 | @command -v $(PA11Y) >/dev/null || { \ 70 | echo "Installing \"pa11y\" from npm..."; echo; \ 71 | mkdir -p $(SPHINXDIR)/node_modules/ ; \ 72 | npm install --prefix $(SPHINXDIR) pa11y; \ 73 | } 74 | 75 | pymarkdownlnt-install: 76 | @. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/pymarkdown || pip install pymarkdownlnt 77 | 78 | install: $(VENVDIR) 79 | 80 | run: install 81 | . $(VENV); $(VENVDIR)/bin/sphinx-autobuild -b dirhtml --host $(SPHINX_HOST) --port $(SPHINX_PORT) "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 82 | 83 | # Does not depend on $(BUILDDIR) to rebuild properly at every run. 84 | html: install 85 | . $(VENV); $(SPHINXBUILD) --fail-on-warning --keep-going -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS) 86 | 87 | epub: install 88 | . $(VENV); $(SPHINXBUILD) -b epub "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS) 89 | 90 | serve: html 91 | cd "$(BUILDDIR)"; python3 -m http.server --bind 127.0.0.1 8000 92 | 93 | clean: clean-doc 94 | @test ! -e "$(VENVDIR)" -o -d "$(VENVDIR)" -a "$(abspath $(VENVDIR))" != "$(VENVDIR)" 95 | rm -rf $(VENVDIR) 96 | rm -rf $(SPHINXDIR)/node_modules/ 97 | rm -rf $(SPHINXDIR)/styles 98 | rm -rf $(VALE_CONFIG) 99 | 100 | clean-doc: 101 | git clean -fx "$(BUILDDIR)" 102 | rm -rf $(SPHINXDIR)/.doctrees 103 | 104 | linkcheck: install 105 | .
$(VENV) ; $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) || { grep --color -F "[broken]" "$(BUILDDIR)/output.txt"; exit 1; } 106 | exit 0 107 | 108 | pa11y: pa11y-install html 109 | find $(BUILDDIR) -name '*.html' -print0 | xargs -n 1 -0 $(PA11Y) 110 | 111 | lint-md: pymarkdownlnt-install 112 | @. $(VENV); pymarkdownlnt --config $(SPHINXDIR)/.pymarkdown.json scan --recurse --exclude=./$(SPHINXDIR)/** $(SOURCEDIR) 113 | 114 | vale-install: install 115 | @. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/vale || pip install rst2html vale 116 | @. $(VENV); test -f $(VALE_CONFIG) || python3 $(SPHINXDIR)/get_vale_conf.py 117 | @echo '.Name=="Canonical.400-Enforce-inclusive-terms"' > $(SPHINXDIR)/styles/woke.filter 118 | @echo '.Level=="error" and .Name!="Canonical.500-Repeated-words" and .Name!="Canonical.000-US-spellcheck"' > $(SPHINXDIR)/styles/error.filter 119 | @echo '.Name=="Canonical.000-US-spellcheck"' > $(SPHINXDIR)/styles/spelling.filter 120 | @. $(VENV); find $(SPHINXDIR)/venv/lib/python*/site-packages/vale/vale_bin -size 195c -exec vale --version \; 121 | 122 | woke: vale-install 123 | @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt 124 | @cat $(SOURCEDIR)/.custom_wordlist.txt >> $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt 125 | @echo "Running Vale acceptable term check against $(TARGET). To change target set TARGET=<target> with make command" 126 | @. $(VENV); vale --config="$(VALE_CONFIG)" --filter='$(SPHINXDIR)/styles/woke.filter' --glob='*.{md,rst}' $(TARGET) 127 | @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt && rm $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt 128 | 129 | vale: vale-install 130 | @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt 131 | @cat $(SOURCEDIR)/.custom_wordlist.txt >> $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt 132 | @echo "Running Vale against $(TARGET). To change target set TARGET=<target> with make command" 133 | @. $(VENV); vale --config="$(VALE_CONFIG)" --filter='$(SPHINXDIR)/styles/error.filter' --glob='*.{md,rst}' $(TARGET) 134 | @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt && rm $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt 135 | 136 | spelling: vale-install 137 | @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt 138 | @cat $(SOURCEDIR)/.custom_wordlist.txt >> $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt 139 | @echo "Running Vale against $(TARGET). To change target set TARGET=<target> with make command" 140 | @.
$(VENV); vale --config="$(VALE_CONFIG)" --filter='$(SPHINXDIR)/styles/spelling.filter' --glob='*.{md,rst}' $(TARGET) 141 | @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt && rm $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt 142 | 143 | spellcheck: spelling 144 | @echo "Please note that the \`make spellcheck\` command is being deprecated in favor of \`make spelling\`" 145 | 146 | pdf-prep: install 147 | @for packageName in $(REQPDFPACKS); do (dpkg-query -W -f='$${Status}' $$packageName 2>/dev/null | \ 148 | grep -c "ok installed" >/dev/null && echo "Package $$packageName is installed") && continue || \ 149 | (echo; echo "PDF generation requires the installation of the following packages: $(REQPDFPACKS)" && \ 150 | echo "" && echo "Run 'sudo make pdf-prep-force' to install these packages" && echo "" && echo \ 151 | "Please be aware these packages will be installed to your system") && exit 1 ; done 152 | 153 | pdf-prep-force: 154 | apt-get update 155 | apt-get upgrade -y 156 | apt-get install --no-install-recommends -y $(REQPDFPACKS) 157 | 158 | pdf: pdf-prep 159 | @. $(VENV); sphinx-build -M latexpdf "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 160 | @rm ./$(BUILDDIR)/latex/front-page-light.pdf || true 161 | @rm ./$(BUILDDIR)/latex/normal-page-footer.pdf || true 162 | @find ./$(BUILDDIR)/latex -name "*.pdf" -exec mv -t ./$(BUILDDIR) {} + 163 | @rm -r $(BUILDDIR)/latex 164 | @echo 165 | @echo "Output can be found in ./$(BUILDDIR)" 166 | @echo 167 | 168 | allmetrics: html 169 | @echo "Recording documentation metrics..." 170 | @echo "Checking for existence of vale..." 171 | . $(VENV) 172 | @. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/vale || pip install vale 173 | @. $(VENV); test -f $(VALE_CONFIG) || python3 $(SPHINXDIR)/get_vale_conf.py 174 | @. $(VENV); find $(SPHINXDIR)/venv/lib/python*/site-packages/vale/vale_bin -size 195c -exec vale --config "$(VALE_CONFIG)" $(TARGET) > /dev/null \; 175 | @eval '$(METRICSDIR)/source_metrics.sh $(PWD)' 176 | @$(METRICSDIR)/build_metrics.py $(BUILDDIR) 177 | 178 | update: install 179 | @. $(VENV); .sphinx/update_sp.py 180 | 181 | # Catch-all target: route all unknown targets to Sphinx using the new 182 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 183 | %: 184 | $(MAKE) --no-print-directory install 185 | . $(VENV); $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 186 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import yaml 4 | 5 | # Configuration for the Sphinx documentation builder. 6 | # All configuration specific to your project should be done in this file. 7 | # 8 | # If you're new to Sphinx and don't want any advanced or custom features, 9 | # just go through the items marked 'TODO'.
10 | # 11 | # A complete list of built-in Sphinx configuration values: 12 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 13 | # 14 | # Our starter pack uses the custom Canonical Sphinx extension 15 | # to keep all documentation based on it consistent and on brand: 16 | # https://github.com/canonical/canonical-sphinx 17 | 18 | 19 | ####################### 20 | # Project information # 21 | ####################### 22 | 23 | # Project name 24 | # 25 | # TODO: Update with the official name of your project or product 26 | 27 | project = "Airgapped documentation" 28 | author = "Canonical Ltd." 29 | 30 | 31 | # Sidebar documentation title; best kept reasonably short 32 | # 33 | # TODO: To include a version number, add it here (hardcoded or automated). 34 | # 35 | # TODO: To disable the title, set to an empty string. 36 | 37 | html_title = project 38 | 39 | 40 | # Copyright string; shown at the bottom of the page 41 | # 42 | # Now, the starter pack uses CC-BY-SA as the license 43 | # and the current year as the copyright year. 44 | # 45 | # TODO: If your docs need another license, specify it instead of 'CC-BY-SA'. 46 | # 47 | # TODO: If your documentation is a part of the code repository of your project, 48 | # it inherits the code license instead; specify it instead of 'CC-BY-SA'. 49 | # 50 | # NOTE: For static works, it is common to provide the first publication year. 51 | # Another option is to provide both the first year of publication 52 | # and the current year, especially for docs that frequently change, 53 | # e.g. 2022–2023 (note the en-dash). 54 | # 55 | # A way to check a repo's creation date is to get a classic GitHub token 56 | # with 'repo' permissions; see https://github.com/settings/tokens 57 | # Next, use 'curl' and 'jq' to extract the date from the API's output: 58 | # 59 | # curl -H 'Authorization: token ' \ 60 | # -H 'Accept: application/vnd.github.v3.raw' \ 61 | # https://api.github.com/repos/canonical/ | jq '.created_at' 62 | 63 | copyright = "%s CC-BY-SA, %s" % (datetime.date.today().year, author) 64 | 65 | 66 | # Documentation website URL 67 | # 68 | # TODO: Update with the official URL of your docs or leave empty if unsure. 69 | # 70 | # NOTE: The Open Graph Protocol (OGP) enhances page display in a social graph 71 | # and is used by social media platforms; see https://ogp.me/ 72 | 73 | ogp_site_url = "https://documentation.ubuntu.com/airgapped/" 74 | 75 | 76 | # Preview name of the documentation website 77 | # 78 | # TODO: To use a different name for the project in previews, update as needed. 79 | 80 | ogp_site_name = project 81 | 82 | 83 | # Preview image URL 84 | # 85 | # TODO: To customise the preview image, update as needed. 86 | 87 | ogp_image = "https://assets.ubuntu.com/v1/253da317-image-document-ubuntudocs.svg" 88 | 89 | 90 | # Product favicon; shown in bookmarks, browser tabs, etc. 91 | 92 | # TODO: To customise the favicon, uncomment and update as needed. 93 | 94 | # html_favicon = '.sphinx/_static/favicon.png' 95 | 96 | 97 | # Dictionary of values to pass into the Sphinx context for all pages: 98 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-html_context 99 | 100 | html_context = { 101 | # Product page URL; can be different from product docs URL 102 | # 103 | # TODO: Change to your product website URL, 104 | # dropping the 'https://' prefix, e.g. 'ubuntu.com/lxd'. 
105 | # 106 | # TODO: If there's no such website, 107 | # remove the {{ product_page }} link from the page header template 108 | # (usually .sphinx/_templates/header.html; also, see README.rst). 109 | "product_page": "documentation.ubuntu.com", 110 | # Product tag image; the orange part of your logo, shown in the page header 111 | # 112 | # TODO: To add a tag image, uncomment and update as needed. 113 | # 'product_tag': '_static/tag.png', 114 | # Your Discourse instance URL 115 | # 116 | # TODO: Change to your Discourse instance URL or leave empty. 117 | # 118 | # NOTE: If set, adding ':discourse: 123' to an .rst file 119 | # will add a link to Discourse topic 123 at the bottom of the page. 120 | "discourse": "https://discourse.ubuntu.com", 121 | # Your Matrix channel URL 122 | # 123 | # TODO: Change to your Matrix channel URL or leave empty. 124 | "matrix": "https://matrix.to/#/#documentation:ubuntu.com", 125 | # Your documentation GitHub repository URL 126 | # 127 | # TODO: Change to your documentation GitHub repository URL or leave empty. 128 | # 129 | # NOTE: If set, links for viewing the documentation source files 130 | # and creating GitHub issues are added at the bottom of each page. 131 | "github_url": "https://github.com/canonical/airgapped-docs", 132 | # Docs branch in the repo; used in links for viewing the source files 133 | # 134 | # TODO: To customise the branch, uncomment and update as needed. 135 | 'repo_default_branch': 'main', 136 | # Docs location in the repo; used in links for viewing the source files 137 | # 138 | 139 | 140 | # TODO: To customise the directory, uncomment and update as needed. 141 | "repo_folder": "/docs/", 142 | # TODO: To enable or disable the Previous / Next buttons at the bottom of pages 143 | # Valid options: none, prev, next, both 144 | # "sequential_nav": "both", 145 | # TODO: To enable listing contributors on individual pages, set to True 146 | "display_contributors": False, 147 | 148 | # Required for feedback button 149 | 'github_issues': 'enabled', 150 | } 151 | 152 | # TODO: To enable the edit button on pages, uncomment and change the link to a 153 | # public repository on GitHub or Launchpad. Any of the following link domains 154 | # are accepted: 155 | # - https://github.com/example-org/example" 156 | # - https://launchpad.net/example 157 | # - https://git.launchpad.net/example 158 | # 159 | # html_theme_options = { 160 | # 'source_edit_link': 'https://github.com/canonical/sphinx-docs-starter-pack', 161 | # } 162 | 163 | # Project slug; see https://meta.discourse.org/t/what-is-category-slug/87897 164 | # 165 | # TODO: If your documentation is hosted on https://docs.ubuntu.com/, 166 | # uncomment and update as needed. 167 | 168 | slug = 'airgapped' 169 | 170 | ####################### 171 | # Sitemap configuration: https://sphinx-sitemap.readthedocs.io/ 172 | ####################### 173 | 174 | # Base URL of RTD hosted project 175 | 176 | html_baseurl = 'https://documentation.ubuntu.com/airgapped/' 177 | 178 | sitemap_url_scheme = '{link}' 179 | 180 | # Include `lastmod` dates in the sitemap: 181 | 182 | sitemap_show_lastmod = True 183 | 184 | # Exclude generated pages from the sitemap: 185 | 186 | sitemap_excludes = [ 187 | '404/', 188 | 'genindex/', 189 | 'search/', 190 | ] 191 | 192 | # TODO: Add more pages to sitemap_excludes if needed. Wildcards are supported. 193 | # For example, to exclude module pages generated by autodoc, add '_modules/*'. 
194 | 
195 | #################################
196 | # Template and asset locations  #
197 | #################################
198 | 
199 | html_static_path = ["_static"]
200 | templates_path = ["_templates"]
201 | 
202 | 
203 | #############
204 | # Redirects #
205 | #############
206 | 
207 | # To set up redirects: https://documatt.gitlab.io/sphinx-reredirects/usage.html
208 | # For example: 'explanation/old-name.html': '../how-to/prettify.html',
209 | 
210 | # To set up redirects in the Read the Docs project dashboard:
211 | # https://docs.readthedocs.io/en/stable/guides/redirects.html
212 | 
213 | # NOTE: If undefined, set to None, or empty,
214 | #       the sphinx_reredirects extension will be disabled.
215 | 
216 | redirects = {}
217 | 
218 | 
219 | ###########################
220 | # Link checker exceptions #
221 | ###########################
222 | 
223 | # A regex list of URLs that are ignored by 'make linkcheck'
224 | #
225 | # TODO: Remove or adjust these entries after you update the contributing guide
226 | 
227 | linkcheck_ignore = [
228 |     "http://127.0.0.1:8000",
229 |     "https://support.canonical.com/",
230 |     "https://support-portal.canonical.com/",
231 | ]
232 | 
233 | 
234 | # A regex list of URLs where anchors are ignored by 'make linkcheck'
235 | 
236 | linkcheck_anchors_ignore_for_url = [r"https://github\.com/.*"]
237 | 
238 | # Give linkcheck multiple tries on failure
239 | # linkcheck_timeout = 30
240 | linkcheck_retries = 3
241 | 
242 | ########################
243 | # Configuration extras #
244 | ########################
245 | 
246 | # Custom MyST syntax extensions; see
247 | # https://myst-parser.readthedocs.io/en/latest/syntax/optional.html
248 | #
249 | # NOTE: By default, the following MyST extensions are enabled:
250 | #       substitution, deflist, linkify
251 | 
252 | # myst_enable_extensions = set()
253 | 
254 | 
255 | # Custom Sphinx extensions; see
256 | # https://www.sphinx-doc.org/en/master/usage/extensions/index.html
257 | 
258 | # NOTE: The canonical_sphinx extension is required for the starter pack.
259 | #       It automatically enables the following extensions:
260 | #       - custom-rst-roles
261 | #       - myst_parser
262 | #       - notfound.extension
263 | #       - related-links
264 | #       - sphinx_copybutton
265 | #       - sphinx_design
266 | #       - sphinx_reredirects
267 | #       - sphinx_tabs.tabs
268 | #       - sphinxcontrib.jquery
269 | #       - sphinxext.opengraph
270 | #       - terminal-output
271 | #       - youtube-links
272 | 
273 | extensions = [
274 |     "canonical_sphinx",
275 |     "sphinxcontrib.cairosvgconverter",
276 |     "sphinx_last_updated_by_git",
277 |     "sphinx.ext.intersphinx",
278 |     "sphinx_sitemap",
279 | ]
280 | 
281 | # Excludes files or directories from processing
282 | 
283 | exclude_patterns = []
284 | 
285 | # Adds custom CSS files, located under 'html_static_path'
286 | 
287 | html_css_files = [
288 |     'cookie-banner.css',
289 | ]
290 | 
291 | # Adds custom JavaScript files, located under 'html_static_path'
292 | 
293 | html_js_files = [
294 |     'js/bundle.js',
295 | ]
296 | 
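# For illustration (an editorial sketch, not part of the upstream conf.py):
# 'sphinx.ext.intersphinx' is enabled above, but no mapping is defined in this
# file. A hypothetical mapping that would let external references resolve
# against the Sphinx documentation looks like:
#
# intersphinx_mapping = {
#     'sphinx': ('https://www.sphinx-doc.org/en/master/', None),
# }
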
297 | # Feedback button at the top; enabled by default
298 | #
299 | # TODO: To disable the button, uncomment this.
300 | 
301 | # disable_feedback_button = True
302 | 
303 | 
304 | # Workaround for https://github.com/canonical/canonical-sphinx/issues/34
305 | 
306 | if "discourse_prefix" not in html_context and "discourse" in html_context:
307 |     html_context["discourse_prefix"] = html_context["discourse"] + "/t/"
308 | 
309 | # Workaround to load shared MyST substitutions from reuse/substitutions.yaml
310 | 
311 | if os.path.exists('./reuse/substitutions.yaml'):
312 |     with open('./reuse/substitutions.yaml', 'r') as fd:
313 |         myst_substitutions = yaml.safe_load(fd.read())
--------------------------------------------------------------------------------
/docs/.sphinx/update_sp.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | 
3 | # Initial update script for the starter pack.
4 | #
5 | # Requires some manual intervention, but makes identifying updates and differences easier.
6 | #
7 | # For debugging, run this script with DEBUGGING=1,
8 | # e.g. user@device:~/git/Canonical/sphinx-docs-starter-pack/docs$ DEBUGGING=1 python .sphinx/update_sp.py
9 | 
10 | 
11 | import glob
12 | import logging
13 | import os
14 | import re
15 | import requests
16 | import subprocess
17 | import sys
18 | from requests.exceptions import RequestException
19 | from packaging.version import parse as parse_version
20 | 
21 | SPHINX_DIR = os.path.join(os.getcwd(), ".sphinx")
22 | SPHINX_UPDATE_DIR = os.path.join(SPHINX_DIR, "update")
23 | GITHUB_REPO = "canonical/sphinx-docs-starter-pack"
24 | GITHUB_API_BASE = f"https://api.github.com/repos/{GITHUB_REPO}"
25 | GITHUB_API_SPHINX_DIR = f"{GITHUB_API_BASE}/contents/docs/.sphinx"
26 | GITHUB_RAW_BASE = f"https://raw.githubusercontent.com/{GITHUB_REPO}/main"
27 | 
28 | TIMEOUT = 10  # seconds
29 | 
30 | # Check if debugging
31 | if os.getenv("DEBUGGING"):
32 |     logging.basicConfig(level=logging.DEBUG)
33 | 
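# Editorial note (not part of the original script): the constants above expand
# to GitHub endpoints such as
#   https://api.github.com/repos/canonical/sphinx-docs-starter-pack/releases/latest
#   https://api.github.com/repos/canonical/sphinx-docs-starter-pack/contents/docs/.sphinx
# which is how the script discovers the latest release tag and lists the
# remote '.sphinx' files without cloning the repository.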
34 | 
35 | def main():
36 |     # Check local version
37 |     logging.debug("Checking local version")
38 |     try:
39 |         with open(os.path.join(SPHINX_DIR, "version")) as f:
40 |             local_version = f.read().strip()
41 |     except FileNotFoundError:
42 |         print("WARNING\nWARNING\nWARNING")
43 |         print(
44 |             "You need to update to at least version 1.0.0 of the starter pack to start using the update function."
45 |         )
46 |         print("You may experience issues using this functionality.")
47 |         logging.debug("No local version found. Setting version to None")
48 |         local_version = "None"
49 |     except Exception as e:
50 |         logging.debug(e)
51 |         raise RuntimeError("ERROR checking local version") from e
52 |     logging.debug(f"Local version = {local_version}")
53 | 
54 |     # Check release version
55 |     latest_release = query_api(GITHUB_API_BASE + "/releases/latest").json()["tag_name"]
56 |     logging.debug(f"Latest release = {latest_release}")
57 | 
58 |     # Perform actions only if the local version is missing or older than the release
59 |     logging.debug("Comparing versions")
60 |     if local_version == "None" or parse_version(local_version) < parse_version(latest_release):
61 |         logging.debug("Local version is older than the release version.")
62 |         print("Starter pack is out of date.\n")
63 | 
64 |         # Identify and download '.sphinx' dir files to '.sphinx/update'
65 |         files_updated, new_files = update_static_files()
66 | 
67 |         # Write the new version file to '.sphinx/update'
68 | 
69 |         download_file(
70 |             GITHUB_RAW_BASE + "/docs/.sphinx/version",
71 |             os.path.join(SPHINX_UPDATE_DIR, "version"),
72 |         )
73 | 
74 |         # Provide changelog to identify other significant changes
75 |         changelog = query_api(GITHUB_RAW_BASE + "/CHANGELOG.md")
76 |         logging.debug("Changelog obtained")
77 |         version_regex = re.compile(r"#+ +" + re.escape(local_version) + r" *\n")
78 |         print("SEE CURRENT CHANGELOG:")
79 |         print(re.split(version_regex, changelog.text)[0])
80 | 
81 |         # Provide information on any files identified for updates
82 |         if files_updated:
83 |             logging.debug("Updated files found and downloaded")
84 |             print("Differences have been identified in static files.")
85 |             print("Updated files have been downloaded to '.sphinx/update'.")
86 |             print("Validate and move these files into your '.sphinx/' directory.")
87 |         else:
88 |             logging.debug("No files found to update")
89 |         # Provide information on NEW files
90 |         if new_files:
91 |             logging.debug("New files found and downloaded")
92 |             print(
93 |                 "NOTE: New files have been downloaded\n",
94 |                 "See 'NEWFILES.txt' for all downloaded files\n",
95 |                 "Validate and merge these files into your '.sphinx/' directory",
96 |             )
97 |         else:
98 |             logging.debug("No new files found to download")
99 |     else:
100 |         logging.debug("Local version and release version are the same")
101 |         print("This version is up to date.")
102 | 
103 |     # Check requirements are the same
104 |     new_requirements = []
105 |     try:
106 |         with open("requirements.txt", "r") as file:
107 |             logging.debug("Checking requirements")
108 | 
109 |             local_reqs = set(file.read().splitlines()) - {""}
110 |             requirements = set(
111 |                 query_api(GITHUB_RAW_BASE + "/docs/requirements.txt").text.splitlines()
112 |             )
113 | 
114 |             new_requirements = requirements - local_reqs
115 | 
116 |             for req in new_requirements:
117 |                 logging.debug(f"{req} not found in local requirements.txt")
118 | 
119 |             for req in requirements & local_reqs:
120 |                 logging.debug(f"{req} already exists in local requirements.txt")
121 | 
122 |             if new_requirements:
123 |                 print(
124 |                     "You may need to add the following packages to your requirements.txt file:"
125 |                 )
126 |                 for r in new_requirements:
127 |                     print(f"{r}\n")
128 |     except FileNotFoundError:
129 |         print("requirements.txt not found")
130 |         print(
131 |             "The updated starter pack has moved requirements.txt out of the '.sphinx' dir"
132 |         )
133 |         print("requirements.txt not checked, please update your requirements manually")
134 | 
135 | 
136 | def update_static_files():
137 |     """Check local files against the remote for new and changed files; download them to '.sphinx/update'"""
138 |     files, paths = get_local_files_and_paths()
139 |     new_file_list = []
140 | 
141 |     for item in query_api(GITHUB_API_SPHINX_DIR).json():
142 |         logging.debug(f"Checking {item['name']}")
143 |         # Check existing files in the '.sphinx' starter pack static root for a changed SHA
144 |         if item["name"] in files and item["type"] == "file":
145 |             index = files.index(item["name"])
146 |             if item["sha"] != get_git_revision_hash(paths[index]):
147 |                 logging.debug(f"Local {item['name']} is different to remote")
148 |                 download_file(
149 |                     item["download_url"], os.path.join(SPHINX_UPDATE_DIR, item["name"])
150 |                 )
151 |                 if item["name"] == "update_sp.py":
152 |                     # Indicate update script needs to be updated and re-run
153 |                     print("WARNING")
154 |                     print(
155 |                         "THIS UPDATE SCRIPT IS OUT OF DATE. YOU MAY NEED TO RUN ANOTHER UPDATE AFTER UPDATING TO THE FILE IN '.sphinx/update'."
156 |                     )
157 |                     print("WARNING\n")
158 |             else:
159 |                 logging.debug("File hashes are equal")
160 |         # Check nested files '.sphinx/**/**.*' for a changed SHA (single level of depth)
161 |         elif item["type"] == "dir":
162 |             logging.debug(item["name"] + " is a directory")
163 |             for nested_item in query_api(
164 |                 f"{GITHUB_API_SPHINX_DIR}/{item['name']}"
165 |             ).json():
166 |                 logging.debug(f"Checking {nested_item['name']}")
167 |                 if nested_item["name"] in files:
168 |                     index = files.index(nested_item["name"])
169 |                     if nested_item["sha"] != get_git_revision_hash(paths[index]):
170 |                         logging.debug(
171 |                             f"Local {nested_item['name']} is different to remote"
172 |                         )
173 |                         download_file(
174 |                             nested_item["download_url"],
175 |                             os.path.join(
176 |                                 SPHINX_UPDATE_DIR, item["name"], nested_item["name"]
177 |                             ),
178 |                         )
179 |                 # Download NEW nested files
180 |                 else:
181 |                     logging.debug(f"No local version found of {nested_item['name']}")
182 |                     if nested_item["type"] == "file":
183 |                         new_file_list.append(nested_item["name"])
184 |                         download_file(
185 |                             nested_item["download_url"],
186 |                             os.path.join(
187 |                                 SPHINX_UPDATE_DIR, item["name"], nested_item["name"]
188 |                             ),
189 |                         )
190 |         # Download NEW files in the '.sphinx' starter pack static root
191 |         else:
192 |             if item["type"] == "file":
193 |                 logging.debug(f"No local version found of {item['name']}")
194 |                 download_file(
195 |                     item["download_url"], os.path.join(SPHINX_UPDATE_DIR, item["name"])
196 |                 )
197 |                 if item["name"] != "version":
198 |                     new_file_list.append(item["name"])
199 |     # Write return value for the parent function
200 |     if os.path.exists(SPHINX_UPDATE_DIR):
201 |         logging.debug("Files have been downloaded")
202 |         files_updated = True
203 |     else:
204 |         logging.debug("No downloads found")
205 |         files_updated = False
206 |     # Write return value for the parent function
207 |     if new_file_list:
208 |         # Provide more information on new files
209 |         with open("NEWFILES.txt", "w") as f:
210 |             for entry in new_file_list:
211 |                 f.write(f"{entry}\n")
212 |         logging.debug("Some downloaded files are new")
213 |         return files_updated, True
214 |     return files_updated, False
215 | 
216 | 
217 | # Checks git hash of a file
218 | def get_git_revision_hash(file) -> str:
219 |     """Get SHA of local files"""
220 |     logging.debug(f"Getting hash of {os.path.basename(file)}")
221 |     return subprocess.check_output(["git", "hash-object", file]).decode("ascii").strip()
222 | 
223 | 
224 | # Examines local files
225 | def get_local_files_and_paths():
226 |     """Identify '.sphinx' local files and paths"""
227 |     logging.debug("Checking local files and paths")
228 |     try:
229 |         files = []
230 |         paths = []
231 |         patterns = [".*", "**.*", "metrics/**.*"]
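        # Editorial note (not part of the original script): with recursive=True,
        # '**' is only special when it forms a whole path component (as in '**/');
        # in '**.*' and 'metrics/**.*' it behaves like '*'. The scan therefore
        # collects dotfiles and regular files directly under '.sphinx/' plus
        # files one level down in '.sphinx/metrics/'.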
232 | 
233 | 
234 |         for pattern in patterns:
235 |             for file in glob.iglob(os.path.join(SPHINX_DIR, pattern), recursive=True):
236 |                 files.append(os.path.basename(file))
237 |                 paths.append(file)
238 |         return files, paths
239 |     except Exception as e:
240 |         logging.debug(e)
241 |         raise RuntimeError("Failed get_local_files_and_paths()") from e
242 | 
243 | 
244 | # General API query with timeout and RequestException
245 | def query_api(url):
246 |     """Query an API with a globally set timeout"""
247 |     logging.debug(f"Querying {url}")
248 |     try:
249 |         r = requests.get(url, timeout=TIMEOUT)
250 |         return r
251 |     except RequestException as e:
252 |         raise RuntimeError(f"Failed query_api(): {url}") from e
253 | 
254 | 
255 | # General file download function
256 | def download_file(url, output_path):
257 |     """Download a file to a specified path"""
258 |     logging.debug(f"Downloading {os.path.basename(output_path)}")
259 |     try:
260 |         os.makedirs(os.path.dirname(output_path), exist_ok=True)
261 |         with open(output_path, "wb") as file:
262 |             file.write(query_api(url).content)
263 |     except Exception as e:
264 |         logging.debug(e)
265 |         raise RuntimeError(f"Failed download_file(): {url}") from e
266 | 
267 | 
268 | if __name__ == "__main__":
269 |     sys.exit(main())  # Propagate main()'s return value as the exit code
270 | 
--------------------------------------------------------------------------------
/docs/_static/js/bundle.js:
--------------------------------------------------------------------------------
1 | /* Minified cookie-policy bundle (vendored Canonical asset). It sets Google
2 |    consent-mode defaults, renders the cookie notification and preference-manager
3 |    dialogs (with English, Chinese, and Japanese translations), and records the
4 |    visitor's choice in the '_cookies_accepted' cookie. The machine-generated
5 |    minified source was garbled in extraction and is omitted here. */
--------------------------------------------------------------------------------