├── .bandit
├── .coveragerc
├── .flake8
├── .github
│   ├── renovate.json
│   └── workflows
│       ├── build.yml
│       ├── build_on_tag.api.yml
│       └── run_tox.yml
├── .gitignore
├── .python-version
├── .yamllint.yml
├── CHANGELOG.md
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.md
├── compose-files
│   ├── docker-compose.yml
│   └── podman-compose.yml
├── docker
│   ├── Dockerfile-api
│   ├── Dockerfile-workers
│   ├── iib-httpd.conf
│   ├── libpod.conf
│   ├── message_broker
│   │   ├── Dockerfile
│   │   ├── README.md
│   │   ├── activemq.xml
│   │   └── certs
│   │       ├── broker.ks
│   │       ├── ca.crt
│   │       ├── client.crt
│   │       ├── client.key
│   │       └── truststore.ts
│   └── registry
│       ├── README.md
│       └── auth
│           └── htpasswd
├── docs
│   ├── conf.py
│   ├── gettingstarted.md
│   ├── index.rst
│   ├── module_documentation
│   │   ├── iib.common.rst
│   │   ├── iib.rst
│   │   ├── iib.web.rst
│   │   ├── iib.workers.rst
│   │   ├── iib.workers.tasks.rst
│   │   └── index.rst
│   └── requirements.txt
├── iib
│   ├── __init__.py
│   ├── common
│   │   ├── __init__.py
│   │   ├── common_utils.py
│   │   └── tracing.py
│   ├── exceptions.py
│   ├── web
│   │   ├── __init__.py
│   │   ├── api_v1.py
│   │   ├── app.py
│   │   ├── auth.py
│   │   ├── config.py
│   │   ├── docs.py
│   │   ├── errors.py
│   │   ├── iib_static_types.py
│   │   ├── manage.py
│   │   ├── messaging.py
│   │   ├── migrations
│   │   │   ├── alembic.ini
│   │   │   ├── env.py
│   │   │   ├── script.py.mako
│   │   │   └── versions
│   │   │       ├── 04dd7532d9c5_polymorphic_requests.py
│   │   │       ├── 1920ad83d0ab_adding_ignore_bundle_ocp_version.py
│   │   │       ├── 274ba38408e8_initial_migration.py
│   │   │       ├── 2ab3d4558cb6_add_omps_operator_version.py
│   │   │       ├── 3283f52e7329_add_internal_index_image_copy_to_add_and_rm.py
│   │   │       ├── 49d13af4b328_add_add_deprecations_api_endpoint.py
│   │   │       ├── 4c9db41195ec_add_merge_index_image_api_endpoint.py
│   │   │       ├── 5188702409d9_extra_build_tags.py
│   │   │       ├── 56d96595c0f7_add_batches.py
│   │   │       ├── 5d6808c0ce1f_regenerate_bundle_request.py
│   │   │       ├── 60f89c046096_make_binary_image_optional.py
│   │   │       ├── 625fba6081be_add_recursive_related_bundles_endpoint.py
│   │   │       ├── 71c998c1c210_batch_annotations.py
│   │   │       ├── 7346beaff092_add_check_related_image_flag.py
│   │   │       ├── 7573241a5156_rename_bundle_deprecation_association_.py
│   │   │       ├── 8d50f82f0be9_fbc_operations_api.py
│   │   │       ├── 983a81fe5e98_added_distribution_scope_attribute_for_.py
│   │   │       ├── 9d60d35786c1_added_index_image_resolved.py
│   │   │       ├── 9e9d4f9730c8_merge_graph_update.py
│   │   │       ├── a0eadb516360_update_regenerate_bundle_request_.py
│   │   │       ├── bc29053265ba_add_distribution_scope.py
│   │   │       ├── daf67ddcf4a1_add_support_for_graph_update_mode_in_.py
│   │   │       ├── e16a8cd2e028_add_create_empty_index.py
│   │   │       └── eec630370e68_support_deprecation_list_in_add_request_.py
│   │   ├── models.py
│   │   ├── s3_utils.py
│   │   ├── static
│   │   │   ├── api_v1.yaml
│   │   │   └── docs.html
│   │   ├── utils.py
│   │   └── wsgi.py
│   └── workers
│       ├── __init__.py
│       ├── api_utils.py
│       ├── config.py
│       ├── dogpile_cache.py
│       ├── greenwave.py
│       ├── s3_utils.py
│       └── tasks
│           ├── __init__.py
│           ├── build.py
│           ├── build_add_deprecations.py
│           ├── build_create_empty_index.py
│           ├── build_fbc_operations.py
│           ├── build_merge_index_image.py
│           ├── build_recursive_related_bundles.py
│           ├── build_regenerate_bundle.py
│           ├── celery.py
│           ├── fbc_utils.py
│           ├── general.py
│           ├── iib_static_types.py
│           ├── opm_operations.py
│           └── utils.py
├── pyproject.toml
├── requirements-test.in
├── requirements-test.txt
├── requirements.txt
├── setup.py
├── tests
│   ├── __init__.py
│   ├── conftest.py
│   ├── test_web
│   │   ├── test_api_v1.py
│   │   ├── test_app.py
│   │   ├── test_broker_error.py
│   │   ├── test_messaging.py
│   │   ├── test_migrations.py
│   │   ├── test_models.py
│   │   └── test_s3_utils.py
│   └── test_workers
│       ├── __init__.py
│       ├── test_api_utils.py
│       ├── test_config.py
│       ├── test_dogpile_cache.py
│       ├── test_greenwave.py
│       ├── test_s3_utils.py
│       └── test_tasks
│           ├── __init__.py
│           ├── test_build.py
│           ├── test_build_add_deprecations.py
│           ├── test_build_create_empty_index.py
│           ├── test_build_fbc_operations.py
│           ├── test_build_merge_index_image.py
│           ├── test_build_recursive_related_bundles.py
│           ├── test_build_regenerate_bundle.py
│           ├── test_fbc_utils.py
│           ├── test_general.py
│           ├── test_opm_operations.py
│           ├── test_utils.py
│           └── test_utils_cache.py
└── tox.ini
/.bandit:
--------------------------------------------------------------------------------
1 | [bandit]
2 | exclude: /tests,/docker,/htmlcov,/.pytest_cache,/.git*,__pycache__,/.tox,.eggs,*.egg,/env*,/iib-data,/venv,/venv*,/.vscode,/.pyre
3 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | show_missing = True
3 | fail_under = 90
4 | exclude_lines =
5 | pragma: no cover
6 | if __name__ == .__main__.:
7 | def __repr__
8 | omit =
9 | iib/web/manage.py
10 | iib/web/wsgi.py
11 | iib/web/config.py
12 | iib/web/migrations*
13 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude =
3 | .tox
4 | build
5 | dist
6 | venv
7 | per-file-ignores =
8 | ./iib/workers/tasks/build_regenerate_bundle.py: E713
9 | ./iib/workers/tasks/utils.py: E203,E702
10 | ./iib/workers/tasks/build_add_deprecations.py: E713
11 | ./iib/workers/tasks/opm_operations.py: E203
12 | ./iib/web/api_v1.py: E226
13 | ./iib/web/migrations/versions/*: D103
14 | ./tests/*: D103
15 | ./tests/test_web/test_models.py: D103
16 | ./tests/test_web/test_s3_utils.py: D103
17 | ./tests/test_web/test_api_v1.py: D103
18 | ./tests/test_workers/test_tasks/test_build.py: D103,E231
19 | ./tests/test_workers/test_tasks/test_build_regenerate_bundle.py: D103,E241,E222
20 | ./tests/test_workers/test_tasks/test_opm_operations.py: D103, E203
21 | ./tests/test_web/test_migrations.py: E231,D103
22 | extend-ignore = E231
23 |
--------------------------------------------------------------------------------
/.github/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "constraints": {
3 | "python": "==3.12"
4 | },
5 | "enabled": true,
6 | "enabledManagers": ["docker-compose", "dockerfile", "github-actions", "pip_requirements", "pyenv"],
7 | "branchConcurrentLimit": 5,
8 | "docker-compose": {
9 | "fileMatch": ["podman-compose.yml$"]
10 | },
11 | "dependencyDashboard": true,
12 | "reviewers": ["team:exd-guild-hello-operator"]
13 | }
14 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Build images and push to quay.io
3 |
4 | on:
5 | # Start build when GitHub release is published
6 | release:
7 | types: [published]
8 | # Allow manual trigger
9 | workflow_dispatch:
10 | # Rebuild every Monday at 4:30 UTC
11 | schedule:
12 | - cron: '30 4 * * 1'
13 |
14 | jobs:
15 | # this job will only be triggered when one of the above trigger condition is met
16 | deployments:
17 | name: Build and Push to quay.io
18 | runs-on: ubuntu-latest
19 |
20 | steps:
21 | - name: Get latest tag
22 | uses: oprypin/find-latest-tag@v1
23 | with:
24 | repository: release-engineering/iib
25 | releases-only: true
26 | prefix: 'v'
27 | id: iibtag
28 |
29 | - name: Checkout code
30 | uses: actions/checkout@v4
31 | with:
32 | ref: ${{ github.event.release.tag_name || steps.iibtag.outputs.tag }}
33 |
34 | - name: Build iib-worker
35 | id: build-iib-worker
36 | uses: redhat-actions/buildah-build@v2
37 | with:
38 | image: iib-worker
39 | tags: ${{ github.event.release.tag_name || steps.iibtag.outputs.tag }} latest
40 | dockerfiles: |
41 | ./docker/Dockerfile-workers
42 |
43 | - name: Build iib-api
44 | id: build-iib-api
45 | uses: redhat-actions/buildah-build@v2
46 | with:
47 | image: iib-api
48 | tags: ${{ github.event.release.tag_name || steps.iibtag.outputs.tag }} latest
49 | dockerfiles: |
50 | ./docker/Dockerfile-api
51 |
52 | - name: Build iib-message-broker
53 | id: build-iib-message-broker
54 | uses: redhat-actions/buildah-build@v2
55 | with:
56 | image: iib-message-broker
57 | tags: ${{ github.event.release.tag_name || steps.iibtag.outputs.tag }} latest
58 | dockerfiles: |
59 | ./docker/message_broker/Dockerfile
60 |
61 | - name: Push iib-worker to quay.io
62 | id: push-iib-worker
63 | uses: redhat-actions/push-to-registry@v2.7.1
64 | with:
65 | image: ${{ steps.build-iib-worker.outputs.image }}
66 | tags: ${{ steps.build-iib-worker.outputs.tags }}
67 | registry: quay.io/exd-guild-hello-operator
68 | username: ${{ secrets.REGISTRY_QUAY_IO_USER }}
69 | password: ${{ secrets.REGISTRY_QUAY_IO_PASSWORD }}
70 |
71 | - name: Push iib-api to quay.io
72 | id: push-iib-api
73 | uses: redhat-actions/push-to-registry@v2.7.1
74 | with:
75 | image: ${{ steps.build-iib-api.outputs.image }}
76 | tags: ${{ steps.build-iib-api.outputs.tags }}
77 | registry: quay.io/exd-guild-hello-operator
78 | username: ${{ secrets.REGISTRY_QUAY_IO_USER }}
79 | password: ${{ secrets.REGISTRY_QUAY_IO_PASSWORD }}
80 |
81 | - name: Push iib-message-broker to quay.io
82 | id: push-iib-message-broker
83 | uses: redhat-actions/push-to-registry@v2.7.1
84 | with:
85 | image: ${{ steps.build-iib-message-broker.outputs.image }}
86 | tags: ${{ steps.build-iib-message-broker.outputs.tags }}
87 | registry: quay.io/exd-guild-hello-operator
88 | username: ${{ secrets.REGISTRY_QUAY_IO_USER }}
89 | password: ${{ secrets.REGISTRY_QUAY_IO_PASSWORD }}
90 |
--------------------------------------------------------------------------------
/.github/workflows/build_on_tag.api.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Build IIB-API image and push to quay.io
3 |
4 | on:
5 | push:
6 | tags:
7 | - '*'
8 |
9 | jobs:
10 | # this job will only be triggered when tag is pushed
11 | deployments:
12 | name: Build and Push IIB-API to quay.io
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - name: Checkout code
17 | uses: actions/checkout@v4
18 | with:
19 | ref: ${{ github.event.release.tag_name }}
20 |
21 | - name: Build iib-api
22 | id: build-iib-api
23 | uses: redhat-actions/buildah-build@v2
24 | with:
25 | image: iib-api
26 | tags: qe
27 | dockerfiles: |
28 | ./docker/Dockerfile-api
29 |
30 | - name: Push iib-api to quay.io
31 | id: push-iib-api
32 | uses: redhat-actions/push-to-registry@v2.7.1
33 | with:
34 | image: ${{ steps.build-iib-api.outputs.image }}
35 | tags: ${{ steps.build-iib-api.outputs.tags }}
36 | registry: quay.io/exd-guild-hello-operator
37 | username: ${{ secrets.REGISTRY_QUAY_IO_USER }}
38 | password: ${{ secrets.REGISTRY_QUAY_IO_PASSWORD }}
39 |
--------------------------------------------------------------------------------
/.github/workflows/run_tox.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Run tox
3 |
4 | on:
5 | pull_request:
6 | workflow_dispatch:
7 | push:
8 | branches:
9 | - "master"
10 |
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 | strategy:
15 | matrix:
16 | # https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
17 | python-version: ["3.12", "3.13"]
18 | steps:
19 | - uses: actions/checkout@v4
20 | - name: Set up Python ${{ matrix.python-version }}
21 | uses: actions/setup-python@v5
22 | with:
23 | python-version: ${{ matrix.python-version }}
24 | - name: Install dependencies
25 | run: |
26 | sudo apt update -y
27 | sudo apt install libkrb5-dev -y
28 | python -m pip install --upgrade pip
29 | pip install --upgrade virtualenv
30 | pip install --upgrade "tox>4.0.0"
31 | pip install --pre tox-gh-actions
32 | - name: Run static tests with tox
33 | run: |
34 | tox -m static
35 | - name: Run unit tests with tox
36 | run: |
37 | tox -m test
38 | - name: Run documentation build test with tox
39 | run: |
40 | tox -m docs
41 | - name: Run security checks with tox
42 | run: |
43 | tox -m security
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pipenv
85 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
86 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
87 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
88 | # install all needed dependencies.
89 | #Pipfile.lock
90 |
91 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
92 | __pypackages__/
93 |
94 | # Celery stuff
95 | celerybeat-schedule
96 | celerybeat.pid
97 |
98 | # SageMath parsed files
99 | *.sage.py
100 |
101 | # Environments
102 | .env
103 | .venv
104 | env/
105 | venv/
106 | ENV/
107 | env.bak/
108 | venv.bak/
109 |
110 | # Spyder project settings
111 | .spyderproject
112 | .spyproject
113 |
114 | # Rope project settings
115 | .ropeproject
116 |
117 | # mkdocs documentation
118 | /site
119 |
120 | # mypy
121 | .mypy_cache/
122 | .dmypy.json
123 | dmypy.json
124 |
125 | # Pyre type checker
126 | .pyre/
127 |
128 | # IDE
129 | .vscode/
130 |
131 | # docker-compose volumes and files
132 | /iib_data/
133 | /ca-bundle.crt
134 | compose-files/docker/
135 |
136 | # other files
137 | **/.DS_Store
138 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.13.3
2 |
--------------------------------------------------------------------------------
/.yamllint.yml:
--------------------------------------------------------------------------------
1 | ---
2 | extends: default
3 |
4 | rules:
5 | # 100 chars should be enough, but don't fail if a line is longer
6 | line-length:
7 | max: 100
8 | level: warning
9 |
10 | truthy:
11 | # disable checking keys for truthy values (removes warning for "on" section in github workflows)
12 | # https://yamllint.readthedocs.io/en/stable/rules.html#module-yamllint.rules.truthy
13 | check-keys: false
14 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include iib/web/migrations *
2 | recursive-include iib/web/static *
3 | include requirements.txt LICENSE
4 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Set the default composer while allowing user to overwrite via the
2 | # environment variable IIB_COMPOSE_ENGINE.
3 | IIB_COMPOSE_ENGINE ?= docker-compose
4 | IIB_COMPOSE_RUNNER = ${IIB_COMPOSE_ENGINE} -f ${PWD}/compose-files/${IIB_COMPOSE_ENGINE}.yml
5 |
6 | # Declare non-file targets to avoid potential conflict with files
7 | # of the same name.
.PHONY: all up down build test
9 |
10 | # Older versions of podman-compose do not support deleting volumes via -v
11 | COMPOSER_DOWN_OPTS := -v
12 | COMPOSER_DOWN_HELP := $(shell ${IIB_COMPOSE_ENGINE} down --help)
13 | ifeq (,$(findstring volume,$(COMPOSER_DOWN_HELP)))
14 | COMPOSER_DOWN_OPTS :=
15 | endif
16 |
17 | all:
18 | @echo 'Available make targets:'
19 | @echo ''
20 | @echo 'down:'
21 | @echo ' Destroy the local development instance of IIB.'
22 | @echo ''
23 | @echo 'up:'
24 | @echo ' Run a local development instance of IIB.'
25 | @echo ''
26 | @echo 'build:'
27 | @echo ' Build the container images used in the local development instance of IIB.'
28 | @echo ' This is useful for forcing the images to be rebuilt.'
29 | @echo ''
30 | @echo 'test:'
31 | @echo ' Execute unit tests and linters. Use the command "tox" directly for more options.'
32 | @echo ''
33 | @echo 'NOTE: By default, the targets use docker-compose. Alternatively, set the'
34 | @echo ' IIB_COMPOSE_ENGINE environment variable to another compose system, e.g.'
35 | @echo ' "podman-compose".'
36 |
37 | up: ca-bundle.crt iib-data
38 | @echo "Starting the local development instance..."
39 | ${IIB_COMPOSE_RUNNER} up -d
40 |
41 | down:
42 | @echo "Destroying the local development instance..."
43 | ${IIB_COMPOSE_RUNNER} down $(COMPOSER_DOWN_OPTS)
44 | @rm -rf iib_data
45 |
46 | build:
47 | @echo "Building the container images for the local development instance..."
48 | ${IIB_COMPOSE_RUNNER} build
49 |
50 | test:
51 | @tox
52 |
53 | ca-bundle.crt:
54 | @cp -f /etc/pki/tls/certs/ca-bundle.crt .
55 |
56 | iib-data:
57 | @mkdir -p iib_data/registry
58 |
--------------------------------------------------------------------------------
/compose-files/docker-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3'
3 | services:
4 | # This "service" generates the certificate for the registry. Then,
5 | # it exits with status code 0.
6 | minica:
7 | image: registry.access.redhat.com/ubi8/go-toolset:latest
8 | user: root
9 | command:
10 | - /bin/sh
11 | - -c
12 | - >-
13 | go install github.com/jsha/minica@latest &&
14 | cd /opt/app-root/certs &&
15 | /opt/app-root/src/bin/minica --domains registry
16 | environment:
17 | GOPATH: /opt/app-root/src
18 | volumes:
19 | - registry-certs-volume:/opt/app-root/certs:z
20 |
21 | registry:
22 | image: registry:3
23 | ports:
24 | - 8443:8443
25 | environment:
26 | REGISTRY_HTTP_ADDR: 0.0.0.0:8443
27 | REGISTRY_HTTP_TLS_CERTIFICATE: /certs/registry/cert.pem
28 | REGISTRY_HTTP_TLS_KEY: /certs/registry/key.pem
29 | REGISTRY_AUTH: htpasswd
30 | REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd
31 | REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm
32 | volumes:
33 | - ../iib_data/registry:/var/lib/registry:z
34 | - registry-certs-volume:/certs:z
35 | - ../docker/registry/auth:/auth:z
36 | depends_on:
37 | minica:
38 | condition: service_completed_successfully
39 |
40 | db:
41 | image: postgres:17.5
42 | environment:
43 | POSTGRES_USER: iib
44 | POSTGRES_PASSWORD: iib
45 | POSTGRES_DB: iib
46 | POSTGRES_INITDB_ARGS: "--auth='ident' --auth='trust'"
47 |
48 | memcached:
49 | image: memcached
50 | ports:
51 | - 11211:11211
52 |
53 | rabbitmq:
54 | image: rabbitmq:4.1-management
55 | environment:
56 | RABBITMQ_DEFAULT_USER: iib
57 | RABBITMQ_DEFAULT_PASS: iib
58 | # Avoid port conflict with ActiveMQ broker when using podman-compose.
59 | # Even though the port is not exposed, podman-compose's use of a pod
60 | # requires the ports to be unique across all containers within the pod.
61 | RABBITMQ_NODE_PORT: 5673
62 | ports:
63 | # The RabbitMQ management console
64 | - 8081:15672
65 |
66 | jaeger:
67 | image: jaegertracing/all-in-one
68 | container_name: jaeger
69 | ports:
70 | - 4318:4318
71 | - 16686:16686 # web
72 | environment:
73 | - COLLECTOR_OTLP_ENABLED=true
74 |
75 | iib-api:
76 | build:
77 | context: ..
78 | dockerfile: ./docker/Dockerfile-api
79 | command:
80 | - /bin/sh
81 | - -c
82 | - >-
83 | mkdir -p /etc/iib &&
84 | cp /broker-certs/client.crt /etc/iib/messaging.crt &&
85 | cp /broker-certs/client.key /etc/iib/messaging.key &&
86 | cp /broker-certs/ca.crt /etc/iib/messaging-ca.crt &&
87 | pip3 uninstall -y iib &&
88 | python3 setup.py develop --no-deps &&
89 | iib wait-for-db &&
90 | iib db upgrade &&
91 | flask run --reload --host 0.0.0.0 --port 8080
92 | environment:
93 | FLASK_ENV: development
94 | FLASK_APP: iib/web/wsgi.py
95 | IIB_DEV: 'true'
96 | # enable OpenTelemetry for dev-env
97 | IIB_OTEL_TRACING: 'true'
98 | OTEL_EXPORTER_OTLP_ENDPOINT: http://jaeger:4318
99 | OTEL_EXPORTER_OTLP_TRACES_INSECURE: 'true'
100 | OTEL_SERVICE_NAME: "iib-api"
101 | volumes:
102 | - ../:/src:z
103 | - ../docker/message_broker/certs:/broker-certs:ro,z
104 | - request-logs-volume:/var/log/iib/requests:z
105 | - request-related-bundles-volume:/var/lib/requests/related_bundles:z
106 | - request-recursive-related-bundles-volume:/var/lib/requests/recursive_related_bundles:z
107 | ports:
108 | - 8080:8080
109 | depends_on:
110 | - db
111 | - message-broker
112 | - jaeger
113 |
114 | iib-worker:
115 | build:
116 | context: ..
117 | dockerfile: ./docker/Dockerfile-workers
118 | # Override the default command so that Celery auto-reloads on code changes.
119 | # This also adds the self-signed CA that was used to sign the Docker registry's certificate
120 | # to the trusted CA bundle. This will make podman trust the local Docker registry's certificate.
121 | command:
122 | - /bin/bash
123 | - -c
124 | - >-
125 | cp /host-ca-bundle.crt /etc/pki/tls/certs/ca-bundle.crt &&
126 | cat /registry-certs/minica.pem >> /etc/pki/tls/certs/ca-bundle.crt &&
127 | podman login --authfile ~/.docker/config.json.template -u iib \
128 | -p iibpassword registry:8443 &&
129 | pip3 install watchdog[watchmedo] &&
130 | watchmedo auto-restart -d ./iib/workers -p '*.py' --recursive \
131 | -- celery -A iib.workers.tasks worker --loglevel=info
132 | environment:
133 | IIB_DEV: 'true'
134 | REGISTRY_AUTH_FILE: '/root/.docker/config.json'
135 | REQUESTS_CA_BUNDLE: /etc/pki/tls/certs/ca-bundle.crt
136 | # Make this privileged to be able to build container images
137 | # enable OpenTelemetry for dev-env
138 | IIB_OTEL_TRACING: 'true'
139 | OTEL_EXPORTER_OTLP_ENDPOINT: http://jaeger:4318
140 | OTEL_EXPORTER_OTLP_TRACES_INSECURE: 'true'
141 | OTEL_SERVICE_NAME: "iib-worker"
142 | privileged: true
143 | volumes:
144 | - ../:/src:z
145 | - worker_container_storage:/var/lib/containers:z
146 | - registry-certs-volume:/registry-certs:ro,z
147 | - ../ca-bundle.crt:/host-ca-bundle.crt:ro
148 | - request-logs-volume:/var/log/iib/requests:z
149 | - request-related-bundles-volume:/var/lib/requests/related_bundles:z
150 | - request-recursive-related-bundles-volume:/var/lib/requests/recursive_related_bundles:z
151 | depends_on:
152 | - rabbitmq
153 | - registry
154 | - minica
155 | - memcached
156 | - jaeger
157 |
158 | # This is an external message broker used to publish messages about state changes
159 | message-broker:
160 | build:
161 | context: ..
162 | dockerfile: ./docker/message_broker/Dockerfile
163 | volumes:
164 | - message-broker-volume:/opt/activemq/data:z
165 | - ../docker/message_broker/certs:/broker-certs:ro,z
166 | ports:
167 | - 5671:5671 # amqp+ssl
168 | - 5672:5672 # amqp
169 | - 8161:8161 # web console
170 |
171 | volumes:
172 | registry-certs-volume:
173 | message-broker-volume:
174 | request-logs-volume:
175 | request-related-bundles-volume:
176 | request-recursive-related-bundles-volume:
177 | worker_container_storage:
178 |
--------------------------------------------------------------------------------
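
The compose file above wires the API, workers, database, broker, and registry together for local development. A minimal smoke test against the running stack, assuming the `/api/v1/builds` endpoint implemented in `iib/web/api_v1.py` and the default `8080` port mapping (the endpoint path and response shape are assumptions of this sketch, not confirmed by this extract):

```python
# Hypothetical smoke test for the dev stack started via `make up`.
# Assumes the API lists build requests at /api/v1/builds.
import requests

resp = requests.get('http://localhost:8080/api/v1/builds', timeout=10)
resp.raise_for_status()
payload = resp.json()
print('API reachable; requests returned:', len(payload.get('items', [])))
```
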
/compose-files/podman-compose.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '3'
3 | services:
4 | # This "service" generates the certificate for the registry. Then,
5 | # it exits with status code 0.
6 | minica:
7 | image: registry.access.redhat.com/ubi8/go-toolset:latest
8 | command:
9 | - /bin/sh
10 | - -c
11 | - >-
12 | go install github.com/jsha/minica@latest &&
13 | cd /opt/app-root/certs &&
14 | /opt/app-root/src/bin/minica --domains registry
15 | environment:
16 | GOPATH: /opt/app-root/src
17 | volumes:
18 | - registry-certs-volume:/opt/app-root/certs:z
19 |
20 | registry:
21 | image: registry:3
22 | ports:
23 | - 8443:8443
24 | environment:
25 | REGISTRY_HTTP_ADDR: 0.0.0.0:8443
26 | REGISTRY_HTTP_TLS_CERTIFICATE: /certs/registry/cert.pem
27 | REGISTRY_HTTP_TLS_KEY: /certs/registry/key.pem
28 | REGISTRY_AUTH: htpasswd
29 | REGISTRY_AUTH_HTPASSWD_PATH: /auth/htpasswd
30 | REGISTRY_AUTH_HTPASSWD_REALM: Registry Realm
31 | volumes:
32 | - ../iib_data/registry:/var/lib/registry
33 | - registry-certs-volume:/certs:z
34 | - ../docker/registry/auth:/auth
35 | depends_on:
36 | minica:
37 | condition: service_completed_successfully
38 |
39 | db:
40 | image: postgres:17.5
41 | environment:
42 | POSTGRES_USER: iib
43 | POSTGRES_PASSWORD: iib
44 | POSTGRES_DB: iib
45 | POSTGRES_INITDB_ARGS: "--auth='ident' --auth='trust'"
46 |
47 | memcached:
48 | image: memcached
49 | ports:
50 | - 11211:11211
51 |
52 | rabbitmq:
53 | image: rabbitmq:4.1-management
54 | environment:
55 | RABBITMQ_DEFAULT_USER: iib
56 | RABBITMQ_DEFAULT_PASS: iib
57 | # Avoid port conflict with ActiveMQ broker when using podman-compose.
58 | # Even though the port is not exposed, podman-compose's use of a pod
59 | # requires the ports to be unique across all containers within the pod.
60 | RABBITMQ_NODE_PORT: 5673
61 | ports:
62 | # The RabbitMQ management console
63 | - 8081:15672
64 |
65 | jaeger:
66 | image: jaegertracing/all-in-one
67 | container_name: jaeger
68 | ports:
69 | - 4318:4318
70 | - 16686:16686 # web
71 | environment:
72 | - COLLECTOR_OTLP_ENABLED=true
73 |
74 | iib-api:
75 | build:
76 | context: ..
77 | dockerfile: ./docker/Dockerfile-api
78 | command:
79 | - /bin/sh
80 | - -c
81 | - >-
82 | mkdir -p /etc/iib &&
83 | cp /broker-certs/client.crt /etc/iib/messaging.crt &&
84 | cp /broker-certs/client.key /etc/iib/messaging.key &&
85 | cp /broker-certs/ca.crt /etc/iib/messaging-ca.crt &&
86 | pip3 uninstall -y iib &&
87 | python3 setup.py develop --no-deps &&
88 | iib wait-for-db &&
89 | iib db upgrade &&
90 | flask run --reload --host 0.0.0.0 --port 8080
91 | environment:
92 | FLASK_ENV: development
93 | FLASK_APP: iib/web/wsgi.py
94 | IIB_DEV: 'true'
95 | IIB_OTEL_TRACING: 'true'
96 | OTEL_EXPORTER_OTLP_ENDPOINT: http://jaeger:4318
97 | OTEL_EXPORTER_OTLP_TRACES_INSECURE: 'true'
98 | OTEL_SERVICE_NAME: "iib-api"
99 | volumes:
100 | - ../:/src
101 | - ../docker/message_broker/certs:/broker-certs
102 | - request-logs-volume:/var/log/iib/requests:z
103 | - request-related-bundles-volume:/var/lib/requests/related_bundles:z
104 | - request-recursive-related-bundles-volume:/var/lib/requests/recursive_related_bundles:z
105 | ports:
106 | - 8080:8080
107 | depends_on:
108 | - db
109 | - message-broker
110 | - jaeger
111 |
112 | iib-worker:
113 | build:
114 | context: ..
115 | dockerfile: ./docker/Dockerfile-workers
116 | # Override the default command so that Celery auto-reloads on code changes.
117 | # This also adds the self-signed CA that was used to sign the Docker registry's certificate
118 | # to the trusted CA bundle. This will make podman trust the local Docker registry's certificate.
119 | # cp host-ca-bundle.crt /etc/pki/tls/certs/ca-bundle.crt &&
120 | command:
121 | - /bin/bash
122 | - -c
123 | - >-
124 | cat /registry-certs/minica.pem >> /etc/pki/tls/certs/ca-bundle.crt &&
125 | podman login --authfile ~/.docker/config.json.template -u iib \
126 | -p iibpassword registry:8443 &&
127 | pip3 install watchdog[watchmedo] &&
128 | watchmedo auto-restart -d ./iib/workers -p '*.py' --recursive \
129 | -- celery -A iib.workers.tasks worker --loglevel=info
130 | environment:
131 | IIB_DEV: 'true'
132 | REGISTRY_AUTH_FILE: '/root/.docker/config.json'
133 | REQUESTS_CA_BUNDLE: /etc/pki/tls/certs/ca-bundle.crt
134 | # Make this privileged to be able to build container images
135 | # enable OpenTelemetry for dev-env
136 | IIB_OTEL_TRACING: 'true'
137 | OTEL_EXPORTER_OTLP_ENDPOINT: http://jaeger:4318
138 | OTEL_EXPORTER_OTLP_TRACES_INSECURE: 'true'
139 | OTEL_SERVICE_NAME: "iib-worker"
140 | privileged: true
141 | volumes:
142 | - ../:/src
143 | - worker_container_storage:/var/lib/containers:z
144 | # - ./docker/registry/certs:/registry-certs
145 | - registry-certs-volume:/registry-certs
146 | - ../ca-bundle.crt:/host-ca-bundle.crt
147 | - request-logs-volume:/var/log/iib/requests:z
148 | - request-related-bundles-volume:/var/lib/requests/related_bundles:z
149 | - request-recursive-related-bundles-volume:/var/lib/requests/recursive_related_bundles:z
150 | depends_on:
151 | - rabbitmq
152 | - registry
153 | - minica
154 | - memcached
155 | - jaeger
156 |
157 | # This is an external message broker used to publish messages about state changes
158 | message-broker:
159 | build:
160 | context: ..
161 | dockerfile: ./docker/message_broker/Dockerfile
162 | volumes:
163 | - message-broker-volume:/opt/activemq/data:z
164 | - ../docker/message_broker/certs:/broker-certs:ro
165 | ports:
166 | - 5671:5671 # amqp+ssl
167 | - 5672:5672 # amqp
168 | - 8161:8161 # web console
169 |
170 | volumes:
171 | registry-certs-volume:
172 | message-broker-volume:
173 | request-logs-volume:
174 | request-related-bundles-volume:
175 | request-recursive-related-bundles-volume:
176 | worker_container_storage:
177 |
--------------------------------------------------------------------------------
/docker/Dockerfile-api:
--------------------------------------------------------------------------------
1 | FROM registry.access.redhat.com/ubi8/ubi:latest
2 | LABEL maintainer="Red Hat - EXD"
3 |
4 | WORKDIR /src
5 | # openssl-devel is required when compiling python-qpid-proton to support SSL
6 | RUN dnf -y install \
7 | --setopt=deltarpm=0 \
8 | --setopt=install_weak_deps=false \
9 | --setopt=tsflags=nodocs \
10 | gcc \
11 | httpd \
12 | krb5-devel \
13 | libffi-devel \
14 | libpq-devel \
15 | mod_auth_gssapi \
16 | mod_ssl \
17 | python3.12-mod_wsgi \
18 | openssl-devel \
19 | python3.12-devel \
20 | python3.12-pip \
21 | python3.12-wheel \
22 | python3.12-setuptools \
23 | && dnf update -y \
24 | && dnf clean all
25 | RUN update-alternatives --set python3 $(which python3.12)
26 |
27 | COPY . .
28 | COPY ./docker/iib-httpd.conf /etc/httpd/conf/httpd.conf
29 |
30 | # The python3-pip shipped with RHEL 8 is too old and cannot be updated via dnf,
31 | # so we upgrade pip with itself to a version above 21.0.0
32 | RUN pip3 install --upgrade pip
33 | RUN pip3 install -r requirements.txt --no-deps --require-hashes
34 | RUN pip3 install . --no-deps
35 | EXPOSE 8080
36 | CMD ["/usr/sbin/httpd", "-DFOREGROUND"]
37 |
--------------------------------------------------------------------------------
/docker/Dockerfile-workers:
--------------------------------------------------------------------------------
1 | FROM registry.access.redhat.com/ubi8/ubi:latest
2 | LABEL maintainer="Red Hat - EXD"
3 |
4 | WORKDIR /src
5 |
6 | RUN dnf -y install \
7 | --setopt=deltarpm=0 \
8 | --setopt=install_weak_deps=false \
9 | --setopt=tsflags=nodocs \
10 | /etc/containers/storage.conf \
11 | buildah \
12 | fuse-overlayfs \
13 | gcc \
14 | krb5-devel \
15 | libffi-devel \
16 | libpq-devel \
17 | openssl-devel \
18 | podman \
19 | python3.12-devel \
20 | python3.12-pip \
21 | python3.12-wheel \
22 | runc \
23 | skopeo \
24 | && dnf update -y \
25 | && dnf clean all
26 |
27 | ADD https://github.com/operator-framework/operator-registry/releases/download/v1.26.4/linux-amd64-opm /usr/bin/opm-v1.26.4
28 | RUN chmod +x /usr/bin/opm-v1.26.4
29 | ADD https://github.com/operator-framework/operator-registry/releases/download/v1.40.0/linux-amd64-opm /usr/bin/opm-v1.40.0
30 | RUN chmod +x /usr/bin/opm-v1.40.0
31 | # Create a link for default opm
32 | RUN ln -s /usr/bin/opm-v1.26.4 /usr/bin/opm
33 | RUN chmod +x /usr/bin/opm
34 | ADD https://github.com/fullstorydev/grpcurl/releases/download/v1.8.5/grpcurl_1.8.5_linux_x86_64.tar.gz /src/grpcurl_1.8.5_linux_x86_64.tar.gz
35 | RUN cd /usr/bin && tar -xf /src/grpcurl_1.8.5_linux_x86_64.tar.gz grpcurl && rm -f /src/grpcurl_1.8.5_linux_x86_64.tar.gz
36 | ADD https://github.com/operator-framework/operator-sdk/releases/download/v1.15.0/operator-sdk_linux_amd64 /usr/bin/operator-sdk
37 | RUN chmod +x /usr/bin/operator-sdk
38 |
39 | RUN update-alternatives --set python3 $(which python3.12)
40 |
41 | # Adjust storage.conf to enable Fuse storage.
42 | RUN sed -i -e 's|^#mount_program|mount_program|g' /etc/containers/storage.conf
43 | COPY docker/libpod.conf /usr/share/containers/libpod.conf
44 |
45 | COPY . .
46 |
47 | # The python3-pip shipped with RHEL 8 is too old and cannot be updated via dnf,
48 | # so we upgrade pip with itself to a version above 21.0.0
49 | RUN pip3 install --upgrade pip
50 | RUN pip3 install -r requirements.txt --no-deps --require-hashes
51 | RUN pip3 install . --no-deps
52 | CMD ["/bin/celery-3", "-A", "iib.workers.tasks", "worker", "--loglevel=info"]
53 |
--------------------------------------------------------------------------------
/docker/iib-httpd.conf:
--------------------------------------------------------------------------------
1 | ServerRoot "/etc/httpd"
2 | PidFile /tmp/httpd.pid
3 | Listen 0.0.0.0:8080 http
4 | User apache
5 | Group apache
6 | DocumentRoot "/var/www/html"
7 | ErrorLog /dev/stderr
8 | TransferLog /dev/stdout
9 | LogLevel warn
10 | TypesConfig /etc/mime.types
11 | DefaultRuntimeDir /tmp
12 | Include conf.modules.d/*.conf
13 | # ServerName iib.domain.local
14 |
15 | WSGISocketPrefix /tmp/wsgi
16 | WSGIDaemonProcess iib threads=5 home=/tmp
17 | WSGIScriptAlias / /src/iib/web/wsgi.py
18 | WSGICallableObject app
19 |
20 | <Directory />
21 | AllowOverride None
22 | </Directory>
23 |
24 | <Directory /src/iib/web>
25 | WSGIProcessGroup iib
26 | WSGIApplicationGroup %{GLOBAL}
27 |
28 | Require all granted
29 | </Directory>
30 |
--------------------------------------------------------------------------------
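
The `WSGIScriptAlias` and `WSGICallableObject` directives above tell mod_wsgi to import `/src/iib/web/wsgi.py` and serve the module-level callable named `app`. A minimal sketch of what such a module must expose, assuming the Flask app factory in `iib/web/app.py` is named `create_app` (the real `wsgi.py` is not shown in this extract):

```python
# Hypothetical wsgi.py satisfying the httpd.conf above: mod_wsgi resolves the
# callable named by WSGICallableObject, i.e. `app`, at module level.
from iib.web.app import create_app  # factory name is an assumption

app = create_app()
```
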
/docker/libpod.conf:
--------------------------------------------------------------------------------
1 | runtime = "runc"
2 |
--------------------------------------------------------------------------------
/docker/message_broker/Dockerfile:
--------------------------------------------------------------------------------
1 | # Based from https://github.com/rmohr/docker-activemq
2 | FROM registry.access.redhat.com/ubi8/ubi:latest
3 |
4 | ENV ACTIVEMQ_VERSION 5.15.12
5 | ENV ACTIVEMQ_HOME /opt/activemq
6 |
7 | # keeping container image updated (security updates)
8 | RUN dnf install java-1.8.0-openjdk maven -y && dnf update -y
9 |
10 | RUN curl "https://archive.apache.org/dist/activemq/$ACTIVEMQ_VERSION/apache-activemq-$ACTIVEMQ_VERSION-bin.tar.gz" -o activemq-bin.tar.gz
11 | RUN tar xzf activemq-bin.tar.gz -C /opt && \
12 | ln -s /opt/apache-activemq-$ACTIVEMQ_VERSION $ACTIVEMQ_HOME
13 | COPY ./docker/message_broker/activemq.xml $ACTIVEMQ_HOME/conf/activemq.xml
14 | RUN useradd -r -M -d $ACTIVEMQ_HOME activemq && \
15 |     chown -R activemq:activemq /opt/apache-activemq-$ACTIVEMQ_VERSION && \
16 | chown -h activemq:activemq $ACTIVEMQ_HOME
17 | USER activemq
18 |
19 | WORKDIR $ACTIVEMQ_HOME
20 | # Expose the AMQP, AMQPS, and web console ports
21 | EXPOSE 5671 5672 8161
22 |
23 | CMD ["/opt/activemq/bin/activemq", "console"]
24 |
--------------------------------------------------------------------------------
/docker/message_broker/README.md:
--------------------------------------------------------------------------------
1 | # Apache ActiveMQ Files
2 |
3 | These files are only to be used in a development environment. Do **not** use these in production!
4 |
5 | ## Descriptions of the Files
6 |
7 | * `certs/broker.ks` - the KeyStore used by ActiveMQ for SSL connections.
8 | * `certs/ca.crt` - the certificate authority used to sign the SSL certificates.
9 | * `certs/client.crt` - the certificate used by IIB to authenticate to ActiveMQ.
10 | * `certs/client.key` - the private key of the certificate used by IIB to authenticate to ActiveMQ.
11 | * `certs/truststore.ts` - the TrustStore that ActiveMQ is configured to use for trusting client
12 | certificates. This only contains the CA from `certs/ca.crt`.
13 | * `activemq.xml` - the configuration for ActiveMQ with AMQP, AMQPS, and virtual destinations
14 | enabled.
15 | * `Dockerfile` - the Dockerfile used to build the ActiveMQ container image.
16 |
17 | ## How to Regenerate the Certificates
18 |
19 | ```bash
20 | mkdir certs && cd certs
21 | openssl genrsa -out ca.key 2048
22 | openssl req -days 3650 -subj "/C=US/ST=North Carolina/L=Raleigh/O=IIB/OU=IIB/CN=Dev-CA" -new -x509 -key ca.key -out ca.crt
23 | keytool -importcert -file ca.crt -alias root_ca -keystore truststore.ts -storetype jks -storepass password -trustcacerts -noprompt
24 | openssl req -new -newkey rsa:2048 -sha256 -nodes -keyout broker.key -subj "/C=US/ST=North Carolina/L=Raleigh/O=IIB/OU=IIB/CN=broker" -out broker.csr
25 | openssl x509 -req -days 3650 -in broker.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out broker.crt
26 | openssl req -new -newkey rsa:2048 -sha256 -nodes -keyout client.key -subj "/C=US/ST=North Carolina/L=Raleigh/O=IIB/OU=IIB/CN=iib-worker" -out client.csr
27 | openssl x509 -req -days 3650 -in client.csr -CA ca.crt -CAkey ca.key -out client.crt
28 | cat broker.key broker.crt > broker_key_cert.pem
29 | openssl pkcs12 -export -in broker_key_cert.pem -out broker.ks -name broker -passout pass:password
30 | rm -f broker_key_cert.pem broker.crt broker.key broker.csr ca.key ca.srl client.csr
31 | chmod 444 *
32 | ```
33 |
--------------------------------------------------------------------------------
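
To exercise the AMQPS listener end to end with the certificates above, a hypothetical `python-qpid-proton` smoke test can connect with the client certificate and publish a single message. The broker URL and the `topic://...` address are assumptions for this dev setup; IIB's real publishing logic lives in `iib/web/messaging.py`:

```python
# Dev-only smoke test: connect to the local broker over AMQPS using the
# client certificate above and send one message.
from proton import Message, SSLDomain
from proton.handlers import MessagingHandler
from proton.reactor import Container


class SmokeTest(MessagingHandler):
    def on_start(self, event):
        ssl = SSLDomain(SSLDomain.MODE_CLIENT)
        ssl.set_credentials('certs/client.crt', 'certs/client.key', None)
        ssl.set_trusted_ca_db('certs/ca.crt')
        # VERIFY_PEER rather than VERIFY_PEER_NAME: the broker cert's CN is
        # "broker", which would not match "localhost".
        ssl.set_peer_authentication(SSLDomain.VERIFY_PEER)
        conn = event.container.connect('amqps://localhost:5671', ssl_domain=ssl)
        event.container.create_sender(conn, 'topic://VirtualTopic.eng.iib.test')

    def on_sendable(self, event):
        event.sender.send(Message(body='hello from the IIB dev environment'))
        event.sender.close()
        event.connection.close()


Container(SmokeTest()).run()
```
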
/docker/message_broker/activemq.xml:
--------------------------------------------------------------------------------
[XML markup not recoverable in this extract. Per docker/message_broker/README.md, this file configures ActiveMQ with AMQP and AMQPS transport connectors and virtual destinations enabled; it references file:${activemq.conf}/credentials.properties for broker credentials.]
--------------------------------------------------------------------------------
/docker/message_broker/certs/broker.ks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/release-engineering/iib/5e96194c007c062777b2861ac415bee964e48c13/docker/message_broker/certs/broker.ks
--------------------------------------------------------------------------------
/docker/message_broker/certs/ca.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIDqzCCApOgAwIBAgIUaY7POcBbbACMRSxNTELxb5i4xeQwDQYJKoZIhvcNAQEL
3 | BQAwZTELMAkGA1UEBhMCVVMxFzAVBgNVBAgMDk5vcnRoIENhcm9saW5hMRAwDgYD
4 | VQQHDAdSYWxlaWdoMQwwCgYDVQQKDANJSUIxDDAKBgNVBAsMA0lJQjEPMA0GA1UE
5 | AwwGRGV2LUNBMB4XDTIwMDUwMTIwMDA1OFoXDTMwMDQyOTIwMDA1OFowZTELMAkG
6 | A1UEBhMCVVMxFzAVBgNVBAgMDk5vcnRoIENhcm9saW5hMRAwDgYDVQQHDAdSYWxl
7 | aWdoMQwwCgYDVQQKDANJSUIxDDAKBgNVBAsMA0lJQjEPMA0GA1UEAwwGRGV2LUNB
8 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz6gX975QwPjnPFrSmQq2
9 | 5o5Pj+tsQG9y9aa5drHX9LPpLg4b06xDx27Vh6WcPZzsSx8MtWwdgLsA4dO+kR3F
10 | 6kj8Ta1BQWMoCL3c/hwn4XmPk4orpOHZ9d2ZBKqxTunVSOY9liaI9nzEhV1KGNHX
11 | ZuyLBocyceGg+EKQ5NoJsMKFWnGPPYW1Pe8pFj7Rj9j9376rJg/snOIM6L7zR/gi
12 | GM0BJ+BwNG2kTwb42Bza62TvT0ZtD48+2hPLNDOB6b5Ojiv5ao0QE3XVHl6adru0
13 | 9qjikKKhYIr2ZqKhND/l35VcZgmw3oQn4UC8/Lc0IYGxc4O5l2SDYzdOKeziRA2j
14 | XwIDAQABo1MwUTAdBgNVHQ4EFgQU0n4EMDiKtAjxhLlQ33Ys7CPtOjgwHwYDVR0j
15 | BBgwFoAU0n4EMDiKtAjxhLlQ33Ys7CPtOjgwDwYDVR0TAQH/BAUwAwEB/zANBgkq
16 | hkiG9w0BAQsFAAOCAQEAECPzgGBgNfRN0Mcrf319e0mDfuU2C9ogkgFV5q3O8lFf
17 | B7rOh5x24WjKV5pnkGNsnGohuQnIIIKgdu+o6ouc0PItylRnJdcmJldWpw2KMTdM
18 | uVXCaQPKQ5+xZaHyDLokvoIPtnVNo+kOQWmLjfD2iSxQLvMDrMJUjNUVo7WSZepN
19 | S1uib7k6KUULp4j8tnJlmRXa1nYbA9i87oT7sofFtxflwZ7dohj7drPuX1U0GQbN
20 | wHKgIbwTDz2RD83OIVQKRElQc8ls8OTQFbKrMv1LPZw3oiET7JZUBaVdaxK4fYun
21 | ilJJWg5GyfDUJOdvY8NBgqBgkNGRw6Wp1CCzmhrM+w==
22 | -----END CERTIFICATE-----
23 |
--------------------------------------------------------------------------------
/docker/message_broker/certs/client.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIDVTCCAj0CFGwAej8nBFHk9AcyjQiZuL6QNFtAMA0GCSqGSIb3DQEBCwUAMGUx
3 | CzAJBgNVBAYTAlVTMRcwFQYDVQQIDA5Ob3J0aCBDYXJvbGluYTEQMA4GA1UEBwwH
4 | UmFsZWlnaDEMMAoGA1UECgwDSUlCMQwwCgYDVQQLDANJSUIxDzANBgNVBAMMBkRl
5 | di1DQTAeFw0yMDA1MDEyMDAwNThaFw0zMDA0MjkyMDAwNThaMGkxCzAJBgNVBAYT
6 | AlVTMRcwFQYDVQQIDA5Ob3J0aCBDYXJvbGluYTEQMA4GA1UEBwwHUmFsZWlnaDEM
7 | MAoGA1UECgwDSUlCMQwwCgYDVQQLDANJSUIxEzARBgNVBAMMCmlpYi13b3JrZXIw
8 | ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCwbsF76nDlJieaHKDSWsvt
9 | SpxkmVJAIQlFRLcvuhO1R5A1h5C/dUKo0aLKWFpc95+osDUtD8afa2TtjnQjz1rX
10 | o9W9kvQJWuX/wqZQp9oE279AFxxl+Iqj/DIWeJFN+aX7qc/0gAD3Pm8hxgvHcM8y
11 | x6WHIC/ElOH82UlyS+PjytFiMNMOn/edrakrDHLuhLvhVIsFbdXIRvQogJJMqhYh
12 | PTqeJlTbXjQ7trUhy0KS3n0VUXDY5hP1Xspxe9mxPbBERIoSITPqMfoqcEwPz4kC
13 | kccpocolILMgtNu1N5nU3/3AtSYjtiR7k7ytXcxdh0dnR/QvUcydn7/7DUs+TyV9
14 | AgMBAAEwDQYJKoZIhvcNAQELBQADggEBABUdN/rUW6gaXHdLx+8bEtbhXdzatxvE
15 | 9MUnamDHs3/sUJ3eMfqvG78LreT8yaxtxT+rpQqz6yikE1X80rGfbcR9T9tEl36o
16 | IBYL3ma14deGBWQ3eLW98YsHVp1Y1rC8xGgb3ABIQR74J67cagGvr6BABcpxS7Z8
17 | LKpSUGZGxSIXckyuu8DOL6LVb7BFkeNu3qK/PQ7rLIt93TRCDxmL76FCrpFtc4yk
18 | zbt/QsMB6B+w516fhqr9J72D6Y8+z8ofO/nNsbCBSuy3AcCH5/ZxpDCv72XymN48
19 | 7iwDfRSpXmm5ER3QeZi55pgmL7rl3nNwok0pN2aY6xYmIx+DIO9p/w4=
20 | -----END CERTIFICATE-----
21 |
--------------------------------------------------------------------------------
/docker/message_broker/certs/client.key:
--------------------------------------------------------------------------------
1 | -----BEGIN PRIVATE KEY-----
2 | MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCwbsF76nDlJiea
3 | HKDSWsvtSpxkmVJAIQlFRLcvuhO1R5A1h5C/dUKo0aLKWFpc95+osDUtD8afa2Tt
4 | jnQjz1rXo9W9kvQJWuX/wqZQp9oE279AFxxl+Iqj/DIWeJFN+aX7qc/0gAD3Pm8h
5 | xgvHcM8yx6WHIC/ElOH82UlyS+PjytFiMNMOn/edrakrDHLuhLvhVIsFbdXIRvQo
6 | gJJMqhYhPTqeJlTbXjQ7trUhy0KS3n0VUXDY5hP1Xspxe9mxPbBERIoSITPqMfoq
7 | cEwPz4kCkccpocolILMgtNu1N5nU3/3AtSYjtiR7k7ytXcxdh0dnR/QvUcydn7/7
8 | DUs+TyV9AgMBAAECggEBAKKhVSReFDaFlxeNsAh2vWc70TFByvll+l+pWZpa0XPW
9 | RW2ySpDRC+MFaiNOke0ELF2YH0wsMJAetGftWczwg/ImCQ23+c7txPieeiP6tRKp
10 | ZH81Tm4ITfOesqTX+kiACKoT+6aKn1lIabQeFBmBONy7/y6xUpbIDg++Ut9SaPw9
11 | HgTRdGh/WFJD8JaHnnD/I7zr+6sjLb9KdvxsVZGW8wR+sotlA1Vq0cUqD5cuIlPQ
12 | eyXZ+HH+nTmgTziRbdCAlSeTpFl0hRukc7Co63aD3t3Mvn9grXn9YjzvlNa9vf3B
13 | J5tmGJicxUoPaOlJspWYCKoQLY3miCMBBujHdRQCsNkCgYEA25ypZ1za0aLfjJ2l
14 | RXfF1W2k4Y1/QEX0z+GFM5w2ifEtQ3Yir9iSApLmOua33MtN004vufBsBIXUl0mf
15 | 1AGH1zM0wzlNInlPp+xvmwwUlPUOLugay2FywiZ7zp+jiL4D/w+SmzcLR2As76J9
16 | GQwHoyYCzMmUnF5XKjPTg2slk5cCgYEAzaqLpQw6su10iim22/7arfRaSV6UEj+P
17 | ITA+yIwso8Ke2R55CNUj0PjQhm4bgI25tJ/+9smYqxgeLVImJ7LRsxiZ86kMfJHN
18 | 8NIvL+Tbbwp9bo5vz7BnGCZDC2nL0KCdK1i4ezduiT0/2i9WHpPoy4eWTJZUBnt4
19 | lOxLrmaeYgsCgYBHG36ufR5M6pO/j/vHdnSUGqBcd9hQE1alSLw8bcQq/fBN+55X
20 | maQNOUtanwHBeAKG/LXvWq1e+YZQRgJ+PtChGW9oFtGm6+ii0qeAaHTxljc1cOnc
21 | GH1z58oGqSHmUMAygfRSVYxNXBKwtMgUqBSA+vnFHTAFQXavr4A4+SR9mQKBgDJ8
22 | ozso/V/fGrSv+MuYjGYXjfJk3UR9hc1H4qGoh2zL4eK6sEuSxvANpGLbtLSiFexZ
23 | OmWqp2eYzA+kEum74+5rI5/E5+PMhxmO/zZqn9NqZ9Az9w6s5yHugpbSqc3VH7/8
24 | T40v1lqqRnpxFFr9RDByYFr8rDPLK5ajSxDM8CRxAoGBAJ/xgqEwu2Qac4iERwU8
25 | g2ziCMRM5sEft8evbqDccZNkeQUEV/W3vjK+n0IC/OLMPc1BU5/B3gDV7TNrCfru
26 | DI4VDaJYVn4gecyTzZm+Q4Hr1gdrQqmfwCzbmRF9b31PgU256K47lRiYel5pxA6G
27 | qPqVeJiMGuTCZgrGr9i/1N7D
28 | -----END PRIVATE KEY-----
29 |
--------------------------------------------------------------------------------
/docker/message_broker/certs/truststore.ts:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/release-engineering/iib/5e96194c007c062777b2861ac415bee964e48c13/docker/message_broker/certs/truststore.ts
--------------------------------------------------------------------------------
/docker/registry/README.md:
--------------------------------------------------------------------------------
1 | # Registry Files
2 |
3 | These files are only to be used in a development environment. Do not use these in production!
4 |
--------------------------------------------------------------------------------
/docker/registry/auth/htpasswd:
--------------------------------------------------------------------------------
1 | iib:$2y$05$NDmskm82uZezQiel.EKUN.mnRZjz.jAquTa.exfMvN870ucPddULu
2 |
3 |
--------------------------------------------------------------------------------
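
The compose files log the worker into this registry as `iib` with password `iibpassword` (the bcrypt hash above). A hypothetical check from the host that the htpasswd auth is wired up:

```python
# Dev-only check of the local registry's basic auth; verify=False because the
# registry presents a minica-signed certificate the host does not trust.
import requests

resp = requests.get(
    'https://localhost:8443/v2/',
    auth=('iib', 'iibpassword'),
    verify=False,
)
resp.raise_for_status()
print('registry auth OK:', resp.status_code)
```
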
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 | from datetime import datetime
9 | import os
10 | import sys
11 |
12 | import pkg_resources
13 |
14 | # -- Path setup --------------------------------------------------------------
15 | sys.path.append(os.path.abspath('../'))
16 |
17 | # -- Project information -----------------------------------------------------
18 | try:
19 | version = pkg_resources.get_distribution('iib').version
20 | except pkg_resources.DistributionNotFound:
21 | version = 'unknown'
22 | project = 'IIB Image Builder Service'
23 | copyright = datetime.today().strftime('%Y') + ', Red Hat Inc.'
24 | author = 'Red Hat - EXD'
25 |
26 | # -- General configuration ---------------------------------------------------
27 | extensions = [
28 | 'celery.contrib.sphinx',
29 | 'recommonmark',
30 | 'sphinx.ext.autodoc',
31 | 'sphinx.ext.githubpages',
32 | ]
33 | master_doc = 'index'
34 | language = 'en'
35 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
36 | pygments_style = 'sphinx'
37 |
38 | # -- Options for HTML output -------------------------------------------------
39 | html_theme = 'sphinx_rtd_theme'
40 | html_static_path = []
41 |
42 | # -- Options for HTMLHelp output ---------------------------------------------
43 | htmlhelp_basename = 'IIBdoc'
44 |
45 | # -- Extension configuration -------------------------------------------------
46 | # This must be mocked because Read the Docs doesn't have krb5-devel installed
47 | autodoc_mock_imports = ["requests_kerberos"]
48 |
49 | # -- Options for intersphinx extension ---------------------------------------
50 | # Example configuration for intersphinx: refer to the Python standard library.
51 | intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
52 |
--------------------------------------------------------------------------------
/docs/gettingstarted.md:
--------------------------------------------------------------------------------
1 | ../README.md
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | :github_url: https://github.com/release-engineering/iib
2 |
3 | Welcome to the IIB documentation!
4 | =================================
5 |
6 | .. toctree::
7 | :caption: Contents
8 | :maxdepth: 2
9 |
10 | gettingstarted
11 | module_documentation/index
12 | API Documentation
13 |
14 |
15 | Indices and tables
16 | ==================
17 |
18 | * :ref:`genindex`
19 | * :ref:`modindex`
20 | * :ref:`search`
21 |
--------------------------------------------------------------------------------
/docs/module_documentation/iib.common.rst:
--------------------------------------------------------------------------------
1 | iib.common package
2 | ==================
3 |
4 | Submodules
5 | ----------
6 |
7 | iib.common.tracing module
8 | -------------------------
9 |
10 | .. automodule:: iib.common.tracing
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | Module contents
16 | ---------------
17 |
18 | .. automodule:: iib.common
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
--------------------------------------------------------------------------------
/docs/module_documentation/iib.rst:
--------------------------------------------------------------------------------
1 | iib
2 | ===
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | iib.common
10 | iib.web
11 | iib.workers
12 |
13 | Submodules
14 | ----------
15 |
16 | iib.exceptions module
17 | ---------------------
18 |
19 | .. automodule:: iib.exceptions
20 | :ignore-module-all:
21 | :members:
22 | :private-members:
23 | :show-inheritance:
24 |
25 |
26 | Module contents
27 | ---------------
28 |
29 | .. automodule:: iib
30 | :ignore-module-all:
31 | :members:
32 | :private-members:
33 | :show-inheritance:
34 |
--------------------------------------------------------------------------------
/docs/module_documentation/iib.web.rst:
--------------------------------------------------------------------------------
1 | iib.web
2 | =======
3 |
4 | Submodules
5 | ----------
6 |
7 | iib.web.api\_v1 module
8 | ----------------------
9 |
10 | .. automodule:: iib.web.api_v1
11 | :ignore-module-all:
12 | :members:
13 | :private-members:
14 | :show-inheritance:
15 |
16 | iib.web.app module
17 | ------------------
18 |
19 | .. automodule:: iib.web.app
20 | :ignore-module-all:
21 | :members:
22 | :private-members:
23 | :show-inheritance:
24 |
25 | iib.web.auth module
26 | -------------------
27 |
28 | .. automodule:: iib.web.auth
29 | :ignore-module-all:
30 | :members:
31 | :private-members:
32 | :show-inheritance:
33 |
34 | iib.web.config module
35 | ---------------------
36 |
37 | .. automodule:: iib.web.config
38 | :ignore-module-all:
39 | :members:
40 | :private-members:
41 | :show-inheritance:
42 |
43 | iib.web.docs module
44 | -------------------
45 |
46 | .. automodule:: iib.web.docs
47 | :ignore-module-all:
48 | :members:
49 | :private-members:
50 | :show-inheritance:
51 |
52 | iib.web.errors module
53 | ---------------------
54 |
55 | .. automodule:: iib.web.errors
56 | :ignore-module-all:
57 | :members:
58 | :private-members:
59 | :show-inheritance:
60 |
61 | iib.web.iib\_static\_types module
62 | ---------------------------------
63 |
64 | .. automodule:: iib.web.iib_static_types
65 | :members:
66 | :undoc-members:
67 | :show-inheritance:
68 |
69 | iib.web.manage module
70 | ---------------------
71 |
72 | .. automodule:: iib.web.manage
73 | :ignore-module-all:
74 | :members:
75 | :private-members:
76 | :show-inheritance:
77 |
78 | iib.web.messaging module
79 | ------------------------
80 |
81 | .. automodule:: iib.web.messaging
82 | :members:
83 | :undoc-members:
84 | :show-inheritance:
85 |
86 | iib.web.models module
87 | ---------------------
88 |
89 | .. automodule:: iib.web.models
90 | :ignore-module-all:
91 | :members:
92 | :private-members:
93 | :show-inheritance:
94 |
95 | iib.web.s3\_utils module
96 | ------------------------
97 |
98 | .. automodule:: iib.web.s3_utils
99 | :members:
100 | :undoc-members:
101 | :show-inheritance:
102 |
103 | iib.web.utils module
104 | --------------------
105 |
106 | .. automodule:: iib.web.utils
107 | :ignore-module-all:
108 | :members:
109 | :private-members:
110 | :show-inheritance:
111 |
112 | iib.web.wsgi module
113 | -------------------
114 |
115 | .. automodule:: iib.web.wsgi
116 | :ignore-module-all:
117 | :members:
118 | :private-members:
119 | :show-inheritance:
120 |
121 | Module contents
122 | ---------------
123 |
124 | .. automodule:: iib.web
125 | :ignore-module-all:
126 | :members:
127 | :private-members:
128 | :show-inheritance:
129 |
--------------------------------------------------------------------------------
/docs/module_documentation/iib.workers.rst:
--------------------------------------------------------------------------------
1 | iib.workers
2 | ===========
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | iib.workers.tasks
10 |
11 | Submodules
12 | ----------
13 |
14 | iib.workers.api\_utils module
15 | -----------------------------
16 |
17 | .. automodule:: iib.workers.api_utils
18 | :ignore-module-all:
19 | :members:
20 | :private-members:
21 | :show-inheritance:
22 |
23 | iib.workers.config module
24 | -------------------------
25 |
26 | .. automodule:: iib.workers.config
27 | :ignore-module-all:
28 | :members:
29 | :private-members:
30 | :show-inheritance:
31 |
32 | iib.workers.dogpile\_cache module
33 | ---------------------------------
34 |
35 | .. automodule:: iib.workers.dogpile_cache
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
40 | iib.workers.greenwave module
41 | ----------------------------
42 |
43 | .. automodule:: iib.workers.greenwave
44 | :members:
45 | :undoc-members:
46 | :show-inheritance:
47 |
48 | iib.workers.s3\_utils module
49 | ----------------------------
50 |
51 | .. automodule:: iib.workers.s3_utils
52 | :members:
53 | :undoc-members:
54 | :show-inheritance:
55 |
56 | Module contents
57 | ---------------
58 |
59 | .. automodule:: iib.workers
60 | :ignore-module-all:
61 | :members:
62 | :private-members:
63 | :show-inheritance:
64 |
--------------------------------------------------------------------------------
/docs/module_documentation/iib.workers.tasks.rst:
--------------------------------------------------------------------------------
1 | iib.workers.tasks
2 | =================
3 |
4 | Submodules
5 | ----------
6 |
7 | iib.workers.tasks.build module
8 | ------------------------------
9 |
10 | .. automodule:: iib.workers.tasks.build
11 | :ignore-module-all:
12 | :members:
13 | :private-members:
14 | :show-inheritance:
15 |
16 | iib.workers.tasks.build\_create\_empty\_index module
17 | ----------------------------------------------------
18 |
19 | .. automodule:: iib.workers.tasks.build_create_empty_index
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
24 | iib.workers.tasks.build\_fbc\_operations module
25 | -----------------------------------------------
26 |
27 | .. automodule:: iib.workers.tasks.build_fbc_operations
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
32 | iib.workers.tasks.build\_merge\_index\_image module
33 | ---------------------------------------------------
34 |
35 | .. automodule:: iib.workers.tasks.build_merge_index_image
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
40 | iib.workers.tasks.build\_recursive\_related\_bundles module
41 | -----------------------------------------------------------
42 |
43 | .. automodule:: iib.workers.tasks.build_recursive_related_bundles
44 | :members:
45 | :undoc-members:
46 | :show-inheritance:
47 |
48 | iib.workers.tasks.build\_regenerate\_bundle module
49 | --------------------------------------------------
50 |
51 | .. automodule:: iib.workers.tasks.build_regenerate_bundle
52 | :members:
53 | :undoc-members:
54 | :show-inheritance:
55 |
56 | iib.workers.tasks.celery module
57 | -------------------------------
58 |
59 | .. automodule:: iib.workers.tasks.celery
60 | :ignore-module-all:
61 | :members:
62 | :private-members:
63 | :show-inheritance:
64 |
65 | iib.workers.tasks.fbc\_utils module
66 | -----------------------------------
67 |
68 | .. automodule:: iib.workers.tasks.fbc_utils
69 | :members:
70 | :undoc-members:
71 | :show-inheritance:
72 |
73 | iib.workers.tasks.general module
74 | --------------------------------
75 |
76 | .. automodule:: iib.workers.tasks.general
77 | :ignore-module-all:
78 | :members:
79 | :private-members:
80 | :show-inheritance:
81 |
82 | iib.workers.tasks.iib\_static\_types module
83 | -------------------------------------------
84 |
85 | .. automodule:: iib.workers.tasks.iib_static_types
86 | :members:
87 | :undoc-members:
88 | :show-inheritance:
89 |
90 | iib.workers.tasks.opm\_operations module
91 | ----------------------------------------
92 |
93 | .. automodule:: iib.workers.tasks.opm_operations
94 | :members:
95 | :undoc-members:
96 | :show-inheritance:
97 |
98 | iib.workers.tasks.utils module
99 | ------------------------------
100 |
101 | .. automodule:: iib.workers.tasks.utils
102 | :ignore-module-all:
103 | :members:
104 | :private-members:
105 | :show-inheritance:
106 |
107 | Module contents
108 | ---------------
109 |
110 | .. automodule:: iib.workers.tasks
111 | :ignore-module-all:
112 | :members:
113 | :private-members:
114 | :show-inheritance:
115 |
--------------------------------------------------------------------------------
/docs/module_documentation/index.rst:
--------------------------------------------------------------------------------
1 | =====================
2 | Modules Documentation
3 | =====================
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | iib
9 | iib.web
10 | iib.workers
11 | iib.workers.tasks
12 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3
2 | celery
3 | dogpile.cache
4 | flask
5 | flask-login
6 | flask-migrate
7 | flask-sqlalchemy
8 | opentelemetry-api
9 | opentelemetry-exporter-otlp
10 | opentelemetry-instrumentation
11 | opentelemetry-instrumentation-botocore
12 | opentelemetry-instrumentation-celery
13 | opentelemetry-instrumentation-flask
14 | opentelemetry-instrumentation-logging
15 | opentelemetry-instrumentation-requests
16 | opentelemetry-instrumentation-sqlalchemy
17 | opentelemetry-instrumentation-wsgi
18 | opentelemetry-sdk
19 | operator-manifest
20 | psycopg2-binary
21 | python-memcached
22 | python-qpid-proton
23 | recommonmark
24 | requests
25 | ruamel.yaml
26 | setuptools
27 | sphinx==8.2.3
28 | sphinx_rtd_theme
29 | tenacity
30 | typing-extensions
31 | werkzeug==3.1.3
--------------------------------------------------------------------------------
/iib/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/iib/common/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/iib/common/common_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from typing import Dict
3 | from iib.workers.config import get_worker_config
4 |
5 |
6 | def get_binary_versions() -> Dict:
7 | """
8 | Return a dictionary containing the versions of the binaries used by IIB.
9 |
10 | :return: Dictionary with all binaries used and their versions
11 | :rtype: dict
12 | """
13 | from iib.workers.tasks.utils import run_cmd
14 |
15 | podman_version_cmd = ['podman', '-v']
16 | buildah_version_cmd = ['buildah', '-v']
17 |
18 | worker_config = get_worker_config()
19 | iib_ocp_opm_mapping = worker_config.get("iib_ocp_opm_mapping")
20 | opm_versions_available = set()
21 | opm_versions_available.add(worker_config.get('iib_default_opm'))
22 | if iib_ocp_opm_mapping is not None:
23 | opm_versions_available.update(set(iib_ocp_opm_mapping.values()))
24 |
25 | try:
26 | return {
27 | 'opm': [
28 | run_cmd([opm_path, 'version'], exc_msg='Failed to get opm version.').strip()
29 | for opm_path in opm_versions_available
30 | ],
31 | 'podman': run_cmd(podman_version_cmd, exc_msg='Failed to get podman version.').strip(),
32 | 'buildah': run_cmd(
33 | buildah_version_cmd, exc_msg='Failed to get buildah version.'
34 | ).strip(),
35 | }
36 | except FileNotFoundError:
37 | return {'opm': '', 'podman': '', 'buildah': ''}
38 |
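
A minimal usage sketch for get_binary_versions, assuming an installed iib package with a reachable worker configuration; the logger name is illustrative:

import logging

from iib.common.common_utils import get_binary_versions

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('iib.example')  # hypothetical logger name

# Yields e.g. {'opm': [...], 'podman': '...', 'buildah': '...'}; empty strings
# are returned when a binary is not installed.
for binary, version in get_binary_versions().items():
    log.info('%s -> %s', binary, version)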
--------------------------------------------------------------------------------
/iib/common/tracing.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
3 | """Configures the Global Tracer Provider and exports the traces to the OpenTelemetry Collector.
4 |
5 | The OpenTelemetry Collector is configured to receive traces via OTLP over HTTP.
6 | The OTLP exporter is configured to use the environment variables defined in the ansible playbook.
7 |
8 | Usage:
9 | @instrument_tracing()
10 | def func():
11 | pass
12 |
13 | """
14 | import json
15 | import os
16 | import functools
17 | import getpass
18 | import logging
19 | import socket
20 | from copy import deepcopy
21 | from typing import Any, Dict
22 |
23 |
24 | from flask import Response
25 | from opentelemetry import trace
26 | from opentelemetry.trace import SpanKind, Status, StatusCode
27 | from opentelemetry.sdk.resources import Resource, SERVICE_NAME
28 | from opentelemetry.sdk.trace import TracerProvider
29 | from opentelemetry.sdk.trace.export import (
30 | BatchSpanProcessor,
31 | )
32 | from opentelemetry.propagate import set_global_textmap
33 | from opentelemetry.trace.propagation import (
34 | set_span_in_context,
35 | )
36 | from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
37 | from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
38 | from opentelemetry.util.types import Attributes
39 |
40 |
41 | log = logging.getLogger(__name__)
42 | propagator = TraceContextTextMapPropagator()
43 |
44 |
45 | def normalize_data_for_span(data: Dict[str, Any]) -> Attributes:
46 | """
47 | Normalize any dictionary into an OpenTelemetry-usable attributes dictionary.
48 |
49 | :param Dict[str, Any] data: The dictionary to be converted.
50 | :return: Normalized dictionary.
51 | :rtype: Attributes
52 | """
53 | span_data = deepcopy(data)
54 | for key, value in span_data.items():
55 | if type(value) in [type(None), dict, list]:
56 | span_data[key] = str(value)
57 | return span_data
58 |
59 |
60 | class TracingWrapper:
61 | """Wrapper class that will wrap all methods of calls with the instrument_tracing decorator."""
62 |
63 | __instance = None
64 |
65 | def __new__(cls):
66 | """Create a new instance if one does not exist."""
67 | if os.getenv('IIB_OTEL_TRACING', '').lower() != 'true':
68 | return None
69 |
70 | if TracingWrapper.__instance is None:
71 | log.info('Creating TracingWrapper instance')
72 | cls.__instance = super().__new__(cls)
73 | otlp_exporter = OTLPSpanExporter(
74 | endpoint=f"{os.getenv('OTEL_EXPORTER_OTLP_ENDPOINT')}/v1/traces",
75 | )
76 | cls.provider = TracerProvider(
77 | resource=Resource.create({SERVICE_NAME: os.getenv('OTEL_SERVICE_NAME')})
78 | )
79 | cls.processor = BatchSpanProcessor(otlp_exporter)
80 | cls.provider.add_span_processor(cls.processor)
81 | trace.set_tracer_provider(cls.provider)
82 | set_global_textmap(propagator)
83 | cls.tracer = trace.get_tracer(__name__)
84 | return cls.__instance
85 |
86 |
87 | def instrument_tracing(
88 | span_name: str = '',
89 | attributes: Dict = {},
90 | ):
91 | """
92 | Instrument tracing for a function.
93 |
94 | :param span_name: The name of the span to be created.
95 | :param attributes: The attributes to be added to the span.
96 | :return: The decorator that instruments the wrapped function.
97 | """
98 |
99 | def decorator_instrument_tracing(func):
100 | @functools.wraps(func)
101 | def wrapper(*args, **kwargs):
102 | if os.getenv('IIB_OTEL_TRACING', '').lower() != 'true':
103 | return func(*args, **kwargs)
104 |
105 | log.info('Instrumenting span for %s', span_name)
106 | tracer = trace.get_tracer(__name__)
107 | if trace.get_current_span():
108 | context = trace.get_current_span().get_span_context()
109 | else:
110 | context = propagator.extract(carrier={})
111 |
112 | log.debug('Context inside %s: %s', span_name, context)
113 | if kwargs.get('traceparent'):
114 | log.debug('traceparent is %s', kwargs.get('traceparent'))
115 | carrier = {'traceparent': kwargs.get('traceparent')}
116 | trace_context = propagator.extract(carrier)
117 | log.debug('Context is %s', trace_context)
118 | with tracer.start_as_current_span(
119 | span_name or func.__name__, kind=SpanKind.SERVER
120 | ) as span:
121 | for attr in attributes:
122 | span.set_attribute(attr, attributes[attr])
123 | span.set_attribute('host', socket.getfqdn())
124 | span.set_attribute('user', getpass.getuser())
125 |
126 | if func.__name__: # If the function has a name
127 | log.debug('function_name %s', func.__name__)
128 | span.set_attribute('function_name', func.__name__)
129 | try:
130 | result = func(*args, **kwargs)
131 | if isinstance(result, dict):
132 | span_result = normalize_data_for_span(result)
133 | elif isinstance(result, tuple) and isinstance(result[0], Response):
134 | response = json.dumps(result[0].json)
135 | code = result[1]
136 | span_result = {'response': response, 'http_code': code}
137 | else:
138 | # If the returned result is not of type dict, create one
139 | span_result = {'result': str(result) or 'success'}
140 | except Exception as exc:
141 | span.set_status(Status(StatusCode.ERROR))
142 | span.record_exception(exc)
143 | raise
144 | else:
145 | if span_result:
146 | log.debug('result %s', span_result)
147 | span.set_attributes(span_result)
148 | if kwargs:
149 | # Need to handle all the types of kwargs
150 | if "task_id" in kwargs:
151 | log.debug('task_id is %s', kwargs['task_id'])
152 | span.set_attribute('task_id', kwargs['task_id'])
153 | if "task_name" in kwargs:
154 | log.debug('task_name is %s', kwargs['task_name'])
155 | span.set_attribute('task_name', kwargs['task_name'])
156 | if "task_type" in kwargs:
157 | log.debug('task_type is %s', kwargs['task_type'])
158 | span.set_attribute('task_type', kwargs['task_type'])
159 | span.add_event(f'{func.__name__} executed', span_result)
160 | span.set_status(Status(StatusCode.OK))
161 | finally:
162 | # Add the span context from the current span to the link
163 | span_id = span.get_span_context().span_id
164 | trace_id = span.get_span_context().trace_id
165 | # Syntax of traceparent is f'00-{trace_id}-{span_id}-01'
166 | traceparent = f'00-{trace_id}-{span_id}-01'
167 | headers = {'traceparent': traceparent}
168 | propagator.inject(headers)
169 | log.debug('Headers are: %s', headers)
170 | set_span_in_context(span)
171 |
172 | return result
173 |
174 | return wrapper
175 |
176 | return decorator_instrument_tracing
177 |
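
A hedged usage sketch for the decorator above; the function, span name, attribute values, and endpoint are illustrative assumptions, and a local OTLP collector on port 4318 is presumed:

import os

# Tracing is opt-in via the environment, as checked by the module.
os.environ.setdefault('IIB_OTEL_TRACING', 'true')
os.environ.setdefault('OTEL_EXPORTER_OTLP_ENDPOINT', 'http://localhost:4318')
os.environ.setdefault('OTEL_SERVICE_NAME', 'iib-example')

from iib.common.tracing import TracingWrapper, instrument_tracing

TracingWrapper()  # configure the global tracer provider once per process


@instrument_tracing(span_name='resolve_image', attributes={'component': 'example'})
def resolve_image(pull_spec: str) -> dict:
    # The returned dict is normalized and attached to the span as attributes.
    return {'resolved': pull_spec}


resolve_image('quay.io/example/image:latest')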
--------------------------------------------------------------------------------
/iib/exceptions.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
3 |
4 | class BaseException(Exception):
5 | """The base class for all IIB exceptions."""
6 |
7 |
8 | class ConfigError(BaseException):
9 | """The configuration is invalid."""
10 |
11 |
12 | class IIBError(BaseException):
13 | """An error was encountered in IIB."""
14 |
15 |
16 | class ValidationError(BaseException):
17 | """Denote invalid input."""
18 |
19 |
20 | class AddressAlreadyInUse(BaseException):
21 | """Adress is already used by other service."""
22 |
23 |
24 | class ExternalServiceError(BaseException):
25 | """An external service error occurred with HTTP 403 or HTTP 50X."""
26 |
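
Since every class above derives from iib.exceptions.BaseException (note that it shadows the builtin of the same name), callers can catch the whole family with one handler; a small sketch using a hypothetical validation helper:

from iib import exceptions


def validate(payload: dict) -> None:
    # Hypothetical helper, for illustration only.
    if 'from_index' not in payload:
        raise exceptions.ValidationError('"from_index" must be specified')


try:
    validate({})
except exceptions.BaseException as exc:  # catches any IIB exception
    print(f'IIB error: {exc}')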
--------------------------------------------------------------------------------
/iib/web/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
3 | from flask_sqlalchemy import SQLAlchemy
4 |
5 |
6 | db = SQLAlchemy()
7 |
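
A sketch of how a shared Flask-SQLAlchemy handle like this is conventionally bound to an application; the in-memory database URI is an assumption for illustration, and the real wiring presumably lives in iib.web.app:

from flask import Flask

from iib.web import db

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'  # illustrative only
db.init_app(app)

with app.app_context():
    db.create_all()  # a no-op here unless the model modules are imported first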
--------------------------------------------------------------------------------
/iib/web/auth.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from typing import Optional
3 |
4 | from flask import current_app, Request
5 |
6 | from iib.web import db
7 | from iib.web.models import User
8 |
9 |
10 | def user_loader(username: str) -> Optional[User]:
11 | """
12 | Get the user by their username from the database.
13 |
14 | This is used by the Flask-Login library.
15 |
16 | :param str username: the username of the user
17 | :return: the User object associated with the username or None
18 | :rtype: iib.web.models.User
19 | """
20 | return User.query.filter_by(username=username).first()
21 |
22 |
23 | def _get_kerberos_principal(request: Request) -> Optional[str]:
24 | """
25 | Get the Kerberos principal from the current request.
26 |
27 | This relies on the "REMOTE_USER" environment variable being set. This is usually set by the
28 | mod_auth_gssapi Apache authentication module.
29 |
30 | :param flask.Request request: the Flask request
31 | :return: the user's Kerberos principal or None
32 | :rtype: str
33 | """
34 | return request.environ.get('REMOTE_USER')
35 |
36 |
37 | def load_user_from_request(request: Request) -> Optional[User]:
38 | """
39 | Load the user that authenticated from the current request.
40 |
41 | This is used by the Flask-Login library. If the user does not exist in the database, an entry
42 | will be created.
43 |
44 | If None is returned, then Flask-Login will set `flask_login.current_user` to an
45 | `AnonymousUserMixin` object, which has the `is_authenticated` property set to `False`.
46 | Additionally, any route decorated with `@login_required` will raise an `Unauthorized` exception.
47 |
48 | :param flask.Request request: the Flask request
49 | :return: the User object associated with the username or None
50 | :rtype: iib.web.models.User
51 | """
52 | username = _get_kerberos_principal(request)
53 | if not username:
54 | if current_app.config.get('LOGIN_DISABLED', False) is True:
55 | current_app.logger.info(
56 | 'The REMOTE_USER environment variable wasn\'t set on the request, but the '
57 | 'LOGIN_DISABLED configuration is set to True.'
58 | )
59 | return None
60 |
61 | current_app.logger.info(f'The user "{username}" was authenticated successfully by httpd')
62 | user = User.get_or_create(username)
63 | if not user.id:
64 | db.session.commit()
65 |
66 | return user
67 |
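
A sketch of how these callbacks are conventionally registered with Flask-Login; treat the wiring below as an assumption, since the actual registration presumably happens in iib.web.app:

from flask import Flask
from flask_login import LoginManager

from iib.web.auth import load_user_from_request, user_loader

app = Flask(__name__)
login_manager = LoginManager(app)
# Flask-Login's registration methods work both as decorators and plain calls.
login_manager.user_loader(user_loader)
login_manager.request_loader(load_user_from_request)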
--------------------------------------------------------------------------------
/iib/web/config.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import os
3 | import tempfile
4 | from typing import Dict, List, Optional, Union
5 |
6 | TEST_DB_FILE = os.path.join(tempfile.gettempdir(), 'iib_recursive.db')
7 |
8 |
9 | def _get_empty_dict_str_str() -> Dict[str, str]:
10 | # solving mypy error: Incompatible types in assignment
11 | # (expression has type "Dict[, ]",
12 | # variable has type "Union[Dict[str, str], Dict[str, Dict[str, str]]]")
13 | return {}
14 |
15 |
16 | class Config(object):
17 | """The base IIB Flask configuration."""
18 |
19 | # Additional loggers to set to the level defined in IIB_LOG_LEVEL
20 | IIB_ADDITIONAL_LOGGERS: List[str] = []
21 | IIB_AWS_S3_BUCKET_NAME: Optional[str] = None
22 | IIB_BINARY_IMAGE_CONFIG: Dict[str, Dict[str, str]] = {}
23 | IIB_INDEX_TO_GITLAB_PUSH_MAP: Dict[str, str] = {}
24 | IIB_GRAPH_MODE_INDEX_ALLOW_LIST: List[str] = []
25 | IIB_GRAPH_MODE_OPTIONS: List[str] = ['replaces', 'semver', 'semver-skippatch']
26 | IIB_GREENWAVE_CONFIG: Dict[str, str] = {}
27 | IIB_LOG_FORMAT: str = '%(asctime)s %(name)s %(levelname)s %(module)s.%(funcName)s %(message)s'
28 | # This sets the level of the "flask.app" logger, which is accessed from current_app.logger
29 | IIB_LOG_LEVEL: str = 'INFO'
30 | IIB_MAX_PER_PAGE: int = 20
31 | IIB_MESSAGING_CA: str = '/etc/pki/tls/certs/ca-bundle.crt'
32 | IIB_MESSAGING_CERT: str = '/etc/iib/messaging.crt'
33 | IIB_MESSAGING_DURABLE: bool = True
34 | IIB_MESSAGING_KEY: str = '/etc/iib/messaging.key'
35 | IIB_MESSAGING_TIMEOUT: int = 30
36 | IIB_REQUEST_DATA_DAYS_TO_LIVE: int = 3
37 | IIB_REQUEST_LOGS_DIR: Optional[str] = None
38 | IIB_REQUEST_RELATED_BUNDLES_DIR: Optional[str] = None
39 | IIB_REQUEST_RECURSIVE_RELATED_BUNDLES_DIR: Optional[str] = None
40 | IIB_USER_TO_QUEUE: Union[Dict[str, str], Dict[str, Dict[str, str]]] = _get_empty_dict_str_str()
41 | IIB_WORKER_USERNAMES: List[str] = []
42 | SQLALCHEMY_TRACK_MODIFICATIONS: bool = False
43 |
44 |
45 | class ProductionConfig(Config):
46 | """The production IIB Flask configuration."""
47 |
48 | DEBUG: bool = False
49 |
50 |
51 | class DevelopmentConfig(Config):
52 | """The development IIB Flask configuration."""
53 |
54 | IIB_LOG_LEVEL: str = 'DEBUG'
55 | IIB_MESSAGING_BATCH_STATE_DESTINATION: str = 'topic://VirtualTopic.eng.iib.batch.state'
56 | IIB_MESSAGING_BUILD_STATE_DESTINATION: str = 'topic://VirtualTopic.eng.iib.build.state'
57 | IIB_MESSAGING_CA: str = '/etc/iib/messaging-ca.crt'
58 | IIB_MESSAGING_URLS: List[str] = ['amqps://message-broker:5671']
59 | IIB_REQUEST_LOGS_DIR: str = '/var/log/iib/requests'
60 | IIB_REQUEST_RELATED_BUNDLES_DIR: str = '/var/lib/requests/related_bundles'
61 | IIB_REQUEST_RECURSIVE_RELATED_BUNDLES_DIR: str = '/var/lib/requests/recursive_related_bundles'
62 | SQLALCHEMY_DATABASE_URI: str = 'postgresql+psycopg2://iib:iib@db:5432/iib'
63 | LOGIN_DISABLED: bool = True
64 |
65 |
66 | class TestingConfig(DevelopmentConfig):
67 | """The testing IIB Flask configuration."""
68 |
69 | DEBUG: bool = True
70 | IIB_WORKER_USERNAMES: List[str] = ['worker@DOMAIN.LOCAL']
71 | # IMPORTANT: don't use in-memory sqlite. Alembic migrations will create a new
72 | # connection producing a new instance of the database which is deleted immediately
73 | # after the migration completes...
74 | # https://github.com/miguelgrinberg/Flask-Migrate/issues/153
75 | SQLALCHEMY_DATABASE_URI: str = f'sqlite:///{TEST_DB_FILE}'
76 | LOGIN_DISABLED: bool = False
77 |
78 |
79 | class TestingConfigNoAuth(TestingConfig):
80 | """The testing IIB Flask configuration without authentication."""
81 |
82 | # This is needed because Flask seems to read the LOGIN_DISABLED setting
83 | # and configure the relevant extensions at app creation time. Changing this
84 | # during a test run still leaves login enabled. This behavior also applies
85 | # to ENV and DEBUG config values:
86 | # https://flask.palletsprojects.com/en/1.1.x/config/#environment-and-debug-features
87 | LOGIN_DISABLED: bool = True
88 |
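
A sketch of how these configuration classes might be selected; the IIB_ENV variable and the mapping are illustrative assumptions (the real selection logic presumably lives in iib.web.app.create_app):

import os

from flask import Flask

from iib.web.config import DevelopmentConfig, ProductionConfig, TestingConfig

# Hypothetical environment-to-class mapping, for illustration only.
config_by_env = {
    'development': DevelopmentConfig,
    'production': ProductionConfig,
    'testing': TestingConfig,
}

app = Flask(__name__)
app.config.from_object(config_by_env[os.getenv('IIB_ENV', 'production')])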
--------------------------------------------------------------------------------
/iib/web/docs.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from flask import Blueprint, send_from_directory, Response
3 |
4 |
5 | docs = Blueprint('docs', __name__)
6 |
7 |
8 | @docs.route('/', methods=['GET'])
9 | def index() -> Response:
10 | """Return the OpenAPI documentation presented by redoc."""
11 | return send_from_directory('static', 'docs.html')
12 |
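
A sketch of mounting this blueprint on an application; the URL prefix is an assumption for illustration:

from flask import Flask

from iib.web.docs import docs

app = Flask(__name__)
app.register_blueprint(docs, url_prefix='/api/v1')  # docs served at /api/v1/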
--------------------------------------------------------------------------------
/iib/web/errors.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from typing import List, NoReturn
3 |
4 | import kombu.exceptions
5 | from flask import current_app, jsonify, Response
6 | from werkzeug.exceptions import HTTPException
7 |
8 | from iib.exceptions import IIBError, ValidationError
9 | from iib.web import messaging, db
10 | from iib.web.models import Request
11 |
12 |
13 | def json_error(error: Exception) -> Response:
14 | """
15 | Convert exceptions to JSON responses.
16 |
17 | :param Exception error: an Exception to convert to JSON
18 | :return: a Flask JSON response
19 | :rtype: flask.Response
20 | """
21 | if isinstance(error, HTTPException):
22 | if error.code == 404:
23 | msg = 'The requested resource was not found'
24 | else:
25 | msg = str(error.description)
26 | response = jsonify({'error': msg})
27 | # response.status_code cannot be set to None
28 | if error.code:
29 | response.status_code = error.code
30 | else:
31 | status_code = 500
32 | msg = str(error)
33 | if isinstance(error, ValidationError):
34 | status_code = 400
35 | elif isinstance(error, kombu.exceptions.KombuError):
36 | msg = 'Failed to connect to the broker to schedule a task'
37 |
38 | response = jsonify({'error': msg})
39 | response.status_code = status_code
40 | return response
41 |
42 |
43 | def handle_broker_error(request: Request) -> NoReturn:
44 | """
45 | Handle broker errors by setting the request as failed and raising an IIBError exception.
46 |
47 | :param Request request: Request which will be set as failed
48 | :raises IIBError: Raises IIBError exception after setting request to failed state
49 | """
50 | request.add_state('failed', 'The scheduling of the request failed')
51 | db.session.commit()
52 | messaging.send_message_for_state_change(request)
53 |
54 | error_message = f'The scheduling of the build request with ID {request.id} failed'
55 | current_app.logger.exception(error_message)
56 |
57 | raise IIBError(error_message)
58 |
59 |
60 | def handle_broker_batch_error(requests: List[Request]) -> NoReturn:
61 | """
62 | Handle broker errors by setting all requests as failed and raising an IIBError exception.
63 |
64 | :param list requests: list of all requests that should be marked as failed
65 | :raises IIBError: Raises IIBError exception after setting all requests to failed state
66 | """
67 | failed_ids = []
68 | for req in requests:
69 | failed_ids.append(str(req.id))
70 | req.add_state('failed', 'The scheduling of the request failed')
71 | messaging.send_message_for_state_change(req)
72 |
73 | db.session.commit()
74 | error_message = f'The scheduling of the build requests with IDs {", ".join(failed_ids)} failed'
75 | current_app.logger.exception(error_message)
76 |
77 | raise IIBError(error_message)
78 |
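
A sketch of registering json_error as a catch-all handler so API errors come back as JSON; whether IIB wires it exactly this way is an assumption (the registration presumably lives in iib.web.app):

from flask import Flask
from werkzeug.exceptions import HTTPException

from iib.web.errors import json_error

app = Flask(__name__)
# HTTP errors (400, 404, ...) and unexpected exceptions both become JSON.
app.register_error_handler(HTTPException, json_error)
app.register_error_handler(Exception, json_error)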
--------------------------------------------------------------------------------
/iib/web/manage.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import time
3 |
4 | import click
5 | from flask.cli import FlaskGroup
6 | from sqlalchemy.exc import OperationalError
7 |
8 | from iib.web import db
9 | from iib.web.app import create_app
10 |
11 |
12 | @click.group(cls=FlaskGroup, create_app=create_app)
13 | def cli() -> None:
14 | """Manage the IIB Flask application."""
15 |
16 |
17 | @cli.command(name='wait-for-db')
18 | def wait_for_db() -> None:
19 | """Wait until database server is reachable."""
20 | # The polling interval in seconds
21 | poll_interval = 10
22 | while True:
23 | try:
24 | db.engine.connect()
25 | except OperationalError as e:
26 | click.echo('Failed to connect to database: {}'.format(e), err=True)
27 | click.echo('Sleeping for {} seconds...'.format(poll_interval))
28 | time.sleep(poll_interval)
29 | click.echo('Retrying...')
30 | else:
31 | break
32 |
33 |
34 | if __name__ == '__main__':
35 | cli()
36 |
--------------------------------------------------------------------------------
/iib/web/migrations/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # template used to generate migration files
5 | # file_template = %%(rev)s_%%(slug)s
6 |
7 | # set to 'true' to run the environment during
8 | # the 'revision' command, regardless of autogenerate
9 | # revision_environment = false
10 |
11 |
12 | # Logging configuration
13 | [loggers]
14 | keys = root,sqlalchemy,alembic
15 |
16 | [handlers]
17 | keys = console
18 |
19 | [formatters]
20 | keys = generic
21 |
22 | [logger_root]
23 | level = WARN
24 | handlers = console
25 | qualname =
26 |
27 | [logger_sqlalchemy]
28 | level = WARN
29 | handlers =
30 | qualname = sqlalchemy.engine
31 |
32 | [logger_alembic]
33 | level = INFO
34 | handlers =
35 | qualname = alembic
36 |
37 | [handler_console]
38 | class = StreamHandler
39 | args = (sys.stderr,)
40 | level = NOTSET
41 | formatter = generic
42 |
43 | [formatter_generic]
44 | format = %(levelname)-5.5s [%(name)s] %(message)s
45 | datefmt = %H:%M:%S
46 |
--------------------------------------------------------------------------------
/iib/web/migrations/env.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | # This file was autogenerated by alembic
3 | import logging
4 | from logging.config import fileConfig
5 |
6 | from alembic import context
7 | from flask import current_app
8 | from sqlalchemy import engine_from_config
9 | from sqlalchemy import pool
10 |
11 | # this is the Alembic Config object, which provides
12 | # access to the values within the .ini file in use.
13 | config = context.config
14 |
15 | # Interpret the config file for Python logging.
16 | # This line sets up loggers basically.
17 | fileConfig(config.config_file_name)
18 | logger = logging.getLogger('alembic.env')
19 |
20 | config.set_main_option(
21 | 'sqlalchemy.url', str(current_app.config.get('SQLALCHEMY_DATABASE_URI')).replace('%', '%%')
22 | )
23 | target_metadata = current_app.extensions['migrate'].db.metadata
24 |
25 |
26 | def run_migrations_offline():
27 | """
28 | Run migrations in 'offline' mode.
29 |
30 | This configures the context with just a URL
31 | and not an Engine, though an Engine is acceptable
32 | here as well. By skipping the Engine creation
33 | we don't even need a DBAPI to be available.
34 |
35 | Calls to context.execute() here emit the given string to the
36 | script output.
37 | """
38 | url = config.get_main_option("sqlalchemy.url")
39 | context.configure(url=url, target_metadata=target_metadata, literal_binds=True)
40 |
41 | with context.begin_transaction():
42 | context.run_migrations()
43 |
44 |
45 | def run_migrations_online():
46 | """
47 | Run migrations in 'online' mode.
48 |
49 | In this scenario we need to create an Engine
50 | and associate a connection with the context.
51 | """
52 | # this callback is used to prevent an auto-migration from being generated
53 | # when there are no changes to the schema
54 | # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
55 | def process_revision_directives(context, revision, directives):
56 | if getattr(config.cmd_opts, 'autogenerate', False):
57 | script = directives[0]
58 | if script.upgrade_ops.is_empty():
59 | directives[:] = []
60 | logger.info('No changes in schema detected.')
61 |
62 | connectable = engine_from_config(
63 | config.get_section(config.config_ini_section), prefix='sqlalchemy.', poolclass=pool.NullPool
64 | )
65 |
66 | with connectable.connect() as connection:
67 | context.configure(
68 | connection=connection,
69 | target_metadata=target_metadata,
70 | process_revision_directives=process_revision_directives,
71 | **current_app.extensions['migrate'].configure_args,
72 | )
73 |
74 | with context.begin_transaction():
75 | context.run_migrations()
76 |
77 |
78 | if context.is_offline_mode():
79 | run_migrations_offline()
80 | else:
81 | run_migrations_online()
82 |
--------------------------------------------------------------------------------
/iib/web/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/1920ad83d0ab_adding_ignore_bundle_ocp_version.py:
--------------------------------------------------------------------------------
1 | """Adding ignore_bundle_ocp_version.
2 |
3 | Revision ID: 1920ad83d0ab
4 | Revises: 9e9d4f9730c8
5 | Create Date: 2023-11-22 12:03:50.711489
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '1920ad83d0ab'
14 | down_revision = '9e9d4f9730c8'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
21 | batch_op.add_column(sa.Column('ignore_bundle_ocp_version', sa.Boolean(), nullable=True))
22 |
23 |
24 | def downgrade():
25 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
26 | batch_op.drop_column('ignore_bundle_ocp_version')
27 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/2ab3d4558cb6_add_omps_operator_version.py:
--------------------------------------------------------------------------------
1 | """Extending RequestAdd for omps_operator_version.
2 |
3 | Revision ID: 2ab3d4558cb6
4 | Revises: 71c998c1c210
5 | Create Date: 2020-09-01 13:19:32.267607
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '2ab3d4558cb6'
14 | down_revision = '71c998c1c210'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 |
21 | with op.batch_alter_table('request_add', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('omps_operator_version', sa.String(), nullable=True))
23 |
24 |
25 | def downgrade():
26 |
27 | with op.batch_alter_table('request_add', schema=None) as batch_op:
28 | batch_op.drop_column('omps_operator_version')
29 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/3283f52e7329_add_internal_index_image_copy_to_add_and_rm.py:
--------------------------------------------------------------------------------
1 | """Add internal_index_image_copy to Add and Rm.
2 |
3 | Revision ID: 3283f52e7329
4 | Revises: 5188702409d9
5 | Create Date: 2022-06-10 01:41:18.583209
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '3283f52e7329'
14 | down_revision = '5188702409d9'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('request_add', schema=None) as batch_op:
21 | batch_op.add_column(sa.Column('internal_index_image_copy_id', sa.Integer(), nullable=True))
22 | batch_op.add_column(
23 | sa.Column('internal_index_image_copy_resolved_id', sa.Integer(), nullable=True)
24 | )
25 | batch_op.create_foreign_key(
26 | "internal_index_image_copy_id_fkey", 'image', ['internal_index_image_copy_id'], ['id']
27 | )
28 | batch_op.create_foreign_key(
29 | "internal_index_image_copy_resolved_id_fkey",
30 | 'image',
31 | ['internal_index_image_copy_resolved_id'],
32 | ['id'],
33 | )
34 |
35 | with op.batch_alter_table('request_create_empty_index', schema=None) as batch_op:
36 | batch_op.add_column(sa.Column('internal_index_image_copy_id', sa.Integer(), nullable=True))
37 | batch_op.add_column(
38 | sa.Column('internal_index_image_copy_resolved_id', sa.Integer(), nullable=True)
39 | )
40 | batch_op.create_foreign_key(
41 | "internal_index_image_copy_id_fkey", 'image', ['internal_index_image_copy_id'], ['id']
42 | )
43 | batch_op.create_foreign_key(
44 | "internal_index_image_copy_resolved_id_fkey",
45 | 'image',
46 | ['internal_index_image_copy_resolved_id'],
47 | ['id'],
48 | )
49 |
50 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
51 | batch_op.add_column(sa.Column('internal_index_image_copy_id', sa.Integer(), nullable=True))
52 | batch_op.add_column(
53 | sa.Column('internal_index_image_copy_resolved_id', sa.Integer(), nullable=True)
54 | )
55 | batch_op.create_foreign_key(
56 | "internal_index_image_copy_resolved_id_fkey",
57 | 'image',
58 | ['internal_index_image_copy_resolved_id'],
59 | ['id'],
60 | )
61 | batch_op.create_foreign_key(
62 | "internal_index_image_copy_id_fkey", 'image', ['internal_index_image_copy_id'], ['id']
63 | )
64 |
65 |
66 | def downgrade():
67 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
68 | batch_op.drop_constraint("internal_index_image_copy_resolved_id_fkey", type_='foreignkey')
69 | batch_op.drop_constraint("internal_index_image_copy_id_fkey", type_='foreignkey')
70 | batch_op.drop_column('internal_index_image_copy_resolved_id')
71 | batch_op.drop_column('internal_index_image_copy_id')
72 |
73 | with op.batch_alter_table('request_create_empty_index', schema=None) as batch_op:
74 | batch_op.drop_constraint("internal_index_image_copy_resolved_id_fkey", type_='foreignkey')
75 | batch_op.drop_constraint("internal_index_image_copy_id_fkey", type_='foreignkey')
76 | batch_op.drop_column('internal_index_image_copy_resolved_id')
77 | batch_op.drop_column('internal_index_image_copy_id')
78 |
79 | with op.batch_alter_table('request_add', schema=None) as batch_op:
80 | batch_op.drop_constraint("internal_index_image_copy_resolved_id_fkey", type_='foreignkey')
81 | batch_op.drop_constraint("internal_index_image_copy_id_fkey", type_='foreignkey')
82 | batch_op.drop_column('internal_index_image_copy_resolved_id')
83 | batch_op.drop_column('internal_index_image_copy_id')
84 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/49d13af4b328_add_add_deprecations_api_endpoint.py:
--------------------------------------------------------------------------------
1 | """Add add-deprecations API endpoint.
2 |
3 | Revision ID: 49d13af4b328
4 | Revises: 1920ad83d0ab
5 | Create Date: 2024-07-26 00:17:44.283197
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '49d13af4b328'
14 | down_revision = '1920ad83d0ab'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | op.create_table(
21 | 'deprecation_schema',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('schema', sa.Text(), nullable=False),
24 | sa.PrimaryKeyConstraint('id'),
25 | )
26 | with op.batch_alter_table('deprecation_schema', schema=None) as batch_op:
27 | batch_op.create_index(batch_op.f('ix_deprecation_schema_schema'), ['schema'], unique=True)
28 |
29 | op.create_table(
30 | 'request_add_deprecations',
31 | sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
32 | sa.Column('binary_image_id', sa.Integer(), nullable=True),
33 | sa.Column('binary_image_resolved_id', sa.Integer(), nullable=True),
34 | sa.Column('from_index_id', sa.Integer(), nullable=True),
35 | sa.Column('from_index_resolved_id', sa.Integer(), nullable=True),
36 | sa.Column('index_image_id', sa.Integer(), nullable=True),
37 | sa.Column('index_image_resolved_id', sa.Integer(), nullable=True),
38 | sa.Column('internal_index_image_copy_id', sa.Integer(), nullable=True),
39 | sa.Column('internal_index_image_copy_resolved_id', sa.Integer(), nullable=True),
40 | sa.Column('distribution_scope', sa.String(), nullable=True),
41 | sa.ForeignKeyConstraint(
42 | ['binary_image_id'],
43 | ['image.id'],
44 | ),
45 | sa.ForeignKeyConstraint(
46 | ['binary_image_resolved_id'],
47 | ['image.id'],
48 | ),
49 | sa.ForeignKeyConstraint(
50 | ['from_index_id'],
51 | ['image.id'],
52 | ),
53 | sa.ForeignKeyConstraint(
54 | ['from_index_resolved_id'],
55 | ['image.id'],
56 | ),
57 | sa.ForeignKeyConstraint(
58 | ['id'],
59 | ['request.id'],
60 | ),
61 | sa.ForeignKeyConstraint(
62 | ['index_image_id'],
63 | ['image.id'],
64 | ),
65 | sa.ForeignKeyConstraint(
66 | ['index_image_resolved_id'],
67 | ['image.id'],
68 | ),
69 | sa.ForeignKeyConstraint(
70 | ['internal_index_image_copy_id'],
71 | ['image.id'],
72 | ),
73 | sa.ForeignKeyConstraint(
74 | ['internal_index_image_copy_resolved_id'],
75 | ['image.id'],
76 | ),
77 | sa.PrimaryKeyConstraint('id'),
78 | )
79 | op.create_table(
80 | 'request_add_deprecations_deprecation_schema',
81 | sa.Column('request_add_deprecations_id', sa.Integer(), autoincrement=False, nullable=False),
82 | sa.Column('deprecation_schema_id', sa.Integer(), autoincrement=False, nullable=False),
83 | sa.ForeignKeyConstraint(
84 | ['deprecation_schema_id'],
85 | ['deprecation_schema.id'],
86 | ),
87 | sa.ForeignKeyConstraint(
88 | ['request_add_deprecations_id'],
89 | ['request_add_deprecations.id'],
90 | ),
91 | sa.PrimaryKeyConstraint('request_add_deprecations_id', 'deprecation_schema_id'),
92 | sa.UniqueConstraint('request_add_deprecations_id', 'deprecation_schema_id'),
93 | )
94 | with op.batch_alter_table(
95 | 'request_add_deprecations_deprecation_schema', schema=None
96 | ) as batch_op:
97 | batch_op.create_index(
98 | batch_op.f('ix_request_add_deprecations_deprecation_schema_deprecation_schema_id'),
99 | ['deprecation_schema_id'],
100 | unique=False,
101 | )
102 | batch_op.create_index(
103 | batch_op.f(
104 | 'ix_request_add_deprecations_deprecation_schema_request_add_deprecations_id'
105 | ),
106 | ['request_add_deprecations_id'],
107 | unique=False,
108 | )
109 |
110 | op.create_table(
111 | 'request_add_deprecations_operator',
112 | sa.Column('request_add_deprecations_id', sa.Integer(), autoincrement=False, nullable=False),
113 | sa.Column('operator_id', sa.Integer(), autoincrement=False, nullable=False),
114 | sa.ForeignKeyConstraint(
115 | ['operator_id'],
116 | ['operator.id'],
117 | ),
118 | sa.ForeignKeyConstraint(
119 | ['request_add_deprecations_id'],
120 | ['request_add_deprecations.id'],
121 | ),
122 | sa.PrimaryKeyConstraint('request_add_deprecations_id', 'operator_id'),
123 | sa.UniqueConstraint('request_add_deprecations_id', 'operator_id'),
124 | )
125 | with op.batch_alter_table('request_add_deprecations_operator', schema=None) as batch_op:
126 | batch_op.create_index(
127 | batch_op.f('ix_request_add_deprecations_operator_operator_id'),
128 | ['operator_id'],
129 | unique=False,
130 | )
131 | batch_op.create_index(
132 | batch_op.f('ix_request_add_deprecations_operator_request_add_deprecations_id'),
133 | ['request_add_deprecations_id'],
134 | unique=False,
135 | )
136 |
137 |
138 | def downgrade():
139 | with op.batch_alter_table('request_add_deprecations_operator', schema=None) as batch_op:
140 | batch_op.drop_index(
141 | batch_op.f('ix_request_add_deprecations_operator_request_add_deprecations_id')
142 | )
143 | batch_op.drop_index(batch_op.f('ix_request_add_deprecations_operator_operator_id'))
144 |
145 | op.drop_table('request_add_deprecations_operator')
146 | with op.batch_alter_table(
147 | 'request_add_deprecations_deprecation_schema', schema=None
148 | ) as batch_op:
149 | batch_op.drop_index(
150 | batch_op.f('ix_request_add_deprecations_deprecation_schema_request_add_deprecations_id')
151 | )
152 | batch_op.drop_index(
153 | batch_op.f('ix_request_add_deprecations_deprecation_schema_deprecation_schema_id')
154 | )
155 |
156 | op.drop_table('request_add_deprecations_deprecation_schema')
157 | op.drop_table('request_add_deprecations')
158 | with op.batch_alter_table('deprecation_schema', schema=None) as batch_op:
159 | batch_op.drop_index(batch_op.f('ix_deprecation_schema_schema'))
160 |
161 | op.drop_table('deprecation_schema')
162 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/4c9db41195ec_add_merge_index_image_api_endpoint.py:
--------------------------------------------------------------------------------
1 | """Add merge-index-image api endpoint.
2 |
3 | Revision ID: 4c9db41195ec
4 | Revises: bc29053265ba
5 | Create Date: 2020-09-28 23:06:43.267716
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '4c9db41195ec'
14 | down_revision = 'bc29053265ba'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | op.create_table(
21 | 'request_merge_index_image',
22 | sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
23 | sa.Column('binary_image_id', sa.Integer(), nullable=False),
24 | sa.Column('binary_image_resolved_id', sa.Integer(), nullable=True),
25 | sa.Column('index_image_id', sa.Integer(), nullable=True),
26 | sa.Column('source_from_index_id', sa.Integer(), nullable=False),
27 | sa.Column('source_from_index_resolved_id', sa.Integer(), nullable=True),
28 | sa.Column('target_index_id', sa.Integer(), nullable=True),
29 | sa.Column('target_index_resolved_id', sa.Integer(), nullable=True),
30 | sa.ForeignKeyConstraint(
31 | ['binary_image_id'],
32 | ['image.id'],
33 | ),
34 | sa.ForeignKeyConstraint(
35 | ['binary_image_resolved_id'],
36 | ['image.id'],
37 | ),
38 | sa.ForeignKeyConstraint(
39 | ['id'],
40 | ['request.id'],
41 | ),
42 | sa.ForeignKeyConstraint(
43 | ['index_image_id'],
44 | ['image.id'],
45 | ),
46 | sa.ForeignKeyConstraint(
47 | ['source_from_index_id'],
48 | ['image.id'],
49 | ),
50 | sa.ForeignKeyConstraint(
51 | ['source_from_index_resolved_id'],
52 | ['image.id'],
53 | ),
54 | sa.ForeignKeyConstraint(
55 | ['target_index_id'],
56 | ['image.id'],
57 | ),
58 | sa.ForeignKeyConstraint(
59 | ['target_index_resolved_id'],
60 | ['image.id'],
61 | ),
62 | sa.PrimaryKeyConstraint('id'),
63 | )
64 |
65 | op.create_table(
66 | 'bundle_deprecation',
67 | sa.Column('merge_index_image_id', sa.Integer(), autoincrement=False, nullable=False),
68 | sa.Column('bundle_id', sa.Integer(), autoincrement=False, nullable=False),
69 | sa.ForeignKeyConstraint(
70 | ['bundle_id'],
71 | ['image.id'],
72 | ),
73 | sa.ForeignKeyConstraint(
74 | ['merge_index_image_id'],
75 | ['request_merge_index_image.id'],
76 | ),
77 | sa.PrimaryKeyConstraint('merge_index_image_id', 'bundle_id'),
78 | sa.UniqueConstraint(
79 | 'merge_index_image_id', 'bundle_id', name='merge_index_bundle_constraint'
80 | ),
81 | )
82 | with op.batch_alter_table('bundle_deprecation', schema=None) as batch_op:
83 | batch_op.create_index(
84 | batch_op.f('ix_bundle_deprecation_bundle_id'), ['bundle_id'], unique=False
85 | )
86 | batch_op.create_index(
87 | batch_op.f('ix_bundle_deprecation_merge_index_image_id'),
88 | ['merge_index_image_id'],
89 | unique=False,
90 | )
91 |
92 |
93 | def downgrade():
94 | op.drop_table('request_merge_index_image')
95 | with op.batch_alter_table('bundle_deprecation', schema=None) as batch_op:
96 | batch_op.drop_index(batch_op.f('ix_bundle_deprecation_merge_index_image_id'))
97 | batch_op.drop_index(batch_op.f('ix_bundle_deprecation_bundle_id'))
98 |
99 | op.drop_table('bundle_deprecation')
100 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/5188702409d9_extra_build_tags.py:
--------------------------------------------------------------------------------
1 | """Added BuildTag and RequestBuildTag.
2 |
3 | Revision ID: 5188702409d9
4 | Revises: e16a8cd2e028
5 | Create Date: 2021-09-29 12:19:11.632047
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '5188702409d9'
14 | down_revision = 'e16a8cd2e028'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | op.create_table(
21 | 'build_tag',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('name', sa.String(), nullable=False),
24 | sa.PrimaryKeyConstraint('id'),
25 | )
26 | op.create_table(
27 | 'request_build_tag',
28 | sa.Column('request_id', sa.Integer(), nullable=False),
29 | sa.Column('tag_id', sa.Integer(), autoincrement=False, nullable=False),
30 | sa.ForeignKeyConstraint(
31 | ['request_id'],
32 | ['request.id'],
33 | ),
34 | sa.ForeignKeyConstraint(
35 | ['tag_id'],
36 | ['build_tag.id'],
37 | ),
38 | sa.PrimaryKeyConstraint('request_id', 'tag_id'),
39 | sa.UniqueConstraint('request_id', 'tag_id'),
40 | )
41 | with op.batch_alter_table('request_build_tag', schema=None) as batch_op:
42 | batch_op.create_index(
43 | batch_op.f('ix_request_build_tag_request_id'), ['request_id'], unique=False
44 | )
45 | batch_op.create_index(batch_op.f('ix_request_build_tag_tag_id'), ['tag_id'], unique=False)
46 |
47 | with op.batch_alter_table('request_create_empty_index', schema=None) as batch_op:
48 | batch_op.alter_column('from_index_id', existing_type=sa.INTEGER(), nullable=True)
49 |
50 |
51 | def downgrade():
52 | with op.batch_alter_table('request_create_empty_index', schema=None) as batch_op:
53 | batch_op.alter_column('from_index_id', existing_type=sa.INTEGER(), nullable=False)
54 |
55 | with op.batch_alter_table('request_build_tag', schema=None) as batch_op:
56 | batch_op.drop_index(batch_op.f('ix_request_build_tag_tag_id'))
57 | batch_op.drop_index(batch_op.f('ix_request_build_tag_request_id'))
58 |
59 | op.drop_table('request_build_tag')
60 | op.drop_table('build_tag')
61 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/56d96595c0f7_add_batches.py:
--------------------------------------------------------------------------------
1 | """
2 | Add batches for requests.
3 |
4 | Revision ID: 56d96595c0f7
5 | Revises: 5d6808c0ce1f
6 | Create Date: 2020-04-23 15:52:38.614572
7 |
8 | """
9 | import logging
10 |
11 | from alembic import op
12 | import sqlalchemy as sa
13 |
14 |
15 | # revision identifiers, used by Alembic.
16 | revision = '56d96595c0f7'
17 | down_revision = '5d6808c0ce1f'
18 | branch_labels = None
19 | depends_on = None
20 |
21 | log = logging.getLogger('alembic')
22 |
23 | request_table = sa.Table(
24 | 'request',
25 | sa.MetaData(),
26 | sa.Column('id', sa.Integer(), primary_key=True),
27 | sa.Column('batch_id', sa.Integer()),
28 | )
29 |
30 | batch_table = sa.Table('batch', sa.MetaData(), sa.Column('id', sa.Integer(), primary_key=True))
31 |
32 |
33 | def upgrade():
34 | op.create_table(
35 | 'batch', sa.Column('id', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('id')
36 | )
37 |
38 | with op.batch_alter_table('request') as batch_op:
39 | batch_op.add_column(sa.Column('batch_id', sa.Integer(), nullable=True))
40 | batch_op.create_index(batch_op.f('ix_request_batch_id'), ['batch_id'], unique=False)
41 | batch_op.create_foreign_key('request_batch_id_fk', 'batch', ['batch_id'], ['id'])
42 |
43 | connection = op.get_bind()
44 | # Iterate through all the existing requests
45 | for request in connection.execute(
46 | request_table.select().order_by(request_table.c.id)
47 | ).fetchall():
48 | # Create a new batch per request
49 | connection.execute(batch_table.insert())
50 | # Get the ID of the last created batch
51 | new_batch_id = connection.execute(
52 | batch_table.select().order_by(batch_table.c.id.desc()).limit(1)
53 | ).scalar()
54 | # Set the request's batch as the last created batch
55 | log.info('Adding request %d to batch %d', request.id, new_batch_id)
56 | connection.execute(
57 | request_table.update()
58 | .where(request_table.c.id == request.id)
59 | .values(batch_id=new_batch_id)
60 | )
61 |
62 | # Now that the batches are all set on the requests, make the batch value not nullable
63 | with op.batch_alter_table('request') as batch_op:
64 | batch_op.alter_column('batch_id', existing_type=sa.INTEGER(), nullable=False)
65 |
66 |
67 | def downgrade():
68 | with op.batch_alter_table('request') as batch_op:
69 | batch_op.drop_constraint('request_batch_id_fk', type_='foreignkey')
70 | batch_op.drop_index(batch_op.f('ix_request_batch_id'))
71 | batch_op.drop_column('batch_id')
72 |
73 | op.drop_table('batch')
74 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/5d6808c0ce1f_regenerate_bundle_request.py:
--------------------------------------------------------------------------------
1 | """
2 | Add regenerate-bundle request type.
3 |
4 | Revision ID: 5d6808c0ce1f
5 | Revises: 04dd7532d9c5
6 | Create Date: 2020-04-20 15:25:49.509996
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '5d6808c0ce1f'
14 | down_revision = '04dd7532d9c5'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | REQUEST_TYPE_REGENERATE_BUNDLE = 3
20 |
21 |
22 | # Create references to the tables used to migrate data during the upgrade
23 | # and the downgrade processes.
24 |
25 | # sqlalchemy 2.0: https://docs.sqlalchemy.org/en/20/changelog/migration_20.html#migration-core-usage
26 | # select() no longer accepts a whereclause parameter, and passing it a list is deprecated.
27 | request_table = sa.Table(
28 | 'request',
29 | sa.MetaData(),
30 | sa.Column('id', sa.Integer(), primary_key=True),
31 | sa.Column('type', sa.Integer()),
32 | )
33 |
34 |
35 | def upgrade():
36 | op.create_table(
37 | 'request_regenerate_bundle',
38 | sa.Column('id', sa.Integer(), nullable=False, autoincrement=False),
39 | sa.Column('bundle_image_id', sa.Integer(), nullable=True),
40 | sa.Column('from_bundle_image_id', sa.Integer(), nullable=False),
41 | sa.Column('from_bundle_image_resolved_id', sa.Integer(), nullable=True),
42 | sa.Column('organization', sa.String(), nullable=True),
43 | sa.ForeignKeyConstraint(['bundle_image_id'], ['image.id']),
44 | sa.ForeignKeyConstraint(['from_bundle_image_id'], ['image.id']),
45 | sa.ForeignKeyConstraint(['from_bundle_image_resolved_id'], ['image.id']),
46 | sa.ForeignKeyConstraint(['id'], ['request.id']),
47 | sa.PrimaryKeyConstraint('id'),
48 | )
49 |
50 |
51 | def downgrade():
52 | connection = op.get_bind()
53 |
54 | # Before we can drop the request_regenerate_bundle table, we need to be sure
55 | # there are no records of that type in the database since the data loss is
56 | # irreversible.
57 | regenerate_bundle_requests = connection.execute(
58 | sa.select(sa.func.count())
59 | .select_from(request_table)
60 | .where(request_table.c.type == REQUEST_TYPE_REGENERATE_BUNDLE)
61 | ).scalar()
62 | if regenerate_bundle_requests:
63 | raise RuntimeError(
64 | 'Unable to perform migration. {} regenerate-bundle request(s) exist!'.format(
65 | regenerate_bundle_requests
66 | )
67 | )
68 |
69 | op.drop_table('request_regenerate_bundle')
70 |
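
To make the comment above concrete, a small sketch contrasting the deprecated 1.x select() forms with the 2.0-style call used in this migration's downgrade; the table definition mirrors the one above:

import sqlalchemy as sa

metadata = sa.MetaData()
request_table = sa.Table(
    'request',
    metadata,
    sa.Column('id', sa.Integer(), primary_key=True),
    sa.Column('type', sa.Integer()),
)

# Deprecated 1.x forms:
#   sa.select([request_table.c.id], whereclause=request_table.c.type == 3)
# SQLAlchemy 2.0 form (columns passed positionally, filters chained via .where()):
stmt = (
    sa.select(sa.func.count())
    .select_from(request_table)
    .where(request_table.c.type == 3)
)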
--------------------------------------------------------------------------------
/iib/web/migrations/versions/60f89c046096_make_binary_image_optional.py:
--------------------------------------------------------------------------------
1 | """Make binary_image optional.
2 |
3 | Revision ID: 60f89c046096
4 | Revises: 983a81fe5e98
5 | Create Date: 2020-10-12 15:49:24.523019
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '60f89c046096'
14 | down_revision = '983a81fe5e98'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('request_add', schema=None) as batch_op:
21 | batch_op.alter_column('binary_image_id', existing_type=sa.INTEGER(), nullable=True)
22 |
23 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
24 | batch_op.alter_column('binary_image_id', existing_type=sa.INTEGER(), nullable=True)
25 |
26 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
27 | batch_op.alter_column('binary_image_id', existing_type=sa.INTEGER(), nullable=True)
28 |
29 |
30 | def downgrade():
31 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
32 | batch_op.alter_column('binary_image_id', existing_type=sa.INTEGER(), nullable=False)
33 |
34 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
35 | batch_op.alter_column('binary_image_id', existing_type=sa.INTEGER(), nullable=False)
36 |
37 | with op.batch_alter_table('request_add', schema=None) as batch_op:
38 | batch_op.alter_column('binary_image_id', existing_type=sa.INTEGER(), nullable=False)
39 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/625fba6081be_add_recursive_related_bundles_endpoint.py:
--------------------------------------------------------------------------------
1 | """Add recursive_related_bundles endpoint.
2 |
3 | Revision ID: 625fba6081be
4 | Revises: 3283f52e7329
5 | Create Date: 2022-08-25 17:40:56.784924
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '625fba6081be'
14 | down_revision = '3283f52e7329'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | op.create_table(
21 | 'request_recursive_related_bundles',
22 | sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
23 | sa.Column('parent_bundle_image_id', sa.Integer(), nullable=True),
24 | sa.Column('parent_bundle_image_resolved_id', sa.Integer(), nullable=True),
25 | sa.Column('organization', sa.String(), nullable=True),
26 | sa.ForeignKeyConstraint(
27 | ['id'],
28 | ['request.id'],
29 | ),
30 | sa.ForeignKeyConstraint(
31 | ['parent_bundle_image_id'],
32 | ['image.id'],
33 | ),
34 | sa.ForeignKeyConstraint(
35 | ['parent_bundle_image_resolved_id'],
36 | ['image.id'],
37 | ),
38 | sa.PrimaryKeyConstraint('id'),
39 | )
40 |
41 |
42 | def downgrade():
43 | op.drop_table('request_recursive_related_bundles')
44 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/71c998c1c210_batch_annotations.py:
--------------------------------------------------------------------------------
1 | """
2 | Add batch annotations.
3 |
4 | Revision ID: 71c998c1c210
5 | Revises: 56d96595c0f7
6 | Create Date: 2020-05-07 18:07:20.123669
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '71c998c1c210'
14 | down_revision = '56d96595c0f7'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('batch') as batch_op:
21 | batch_op.add_column(sa.Column('annotations', sa.Text(), nullable=True))
22 |
23 |
24 | def downgrade():
25 | with op.batch_alter_table('batch') as batch_op:
26 | batch_op.drop_column('annotations')
27 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/7346beaff092_add_check_related_image_flag.py:
--------------------------------------------------------------------------------
1 | """Add check_related_images flag.
2 |
3 | Revision ID: 7346beaff092
4 | Revises: daf67ddcf4a1
5 | Create Date: 2023-08-09 23:48:37.624078
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '7346beaff092'
14 | down_revision = 'daf67ddcf4a1'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('request_add', schema=None) as batch_op:
21 | batch_op.add_column(sa.Column('check_related_images', sa.BOOLEAN(), nullable=True))
22 |
23 |
24 | def downgrade():
25 | with op.batch_alter_table('request_add', schema=None) as batch_op:
26 | batch_op.drop_column('check_related_images')
27 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/7573241a5156_rename_bundle_deprecation_association_.py:
--------------------------------------------------------------------------------
1 | """Rename bundle deprecation association for merge index.
2 |
3 | Revision ID: 7573241a5156
4 | Revises: eec630370e68
5 | Create Date: 2021-02-09 13:50:01.905796
6 |
7 | """
8 | from alembic import op
9 |
10 |
11 | # revision identifiers, used by Alembic.
12 | revision = '7573241a5156'
13 | down_revision = 'eec630370e68'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 |
20 | with op.batch_alter_table('bundle_deprecation', schema=None) as batch_op:
21 | batch_op.drop_index('ix_bundle_deprecation_bundle_id')
22 | batch_op.drop_index('ix_bundle_deprecation_merge_index_image_id')
23 |
24 | op.rename_table('bundle_deprecation', 'request_merge_bundle_deprecation', schema=None)
25 |
26 | with op.batch_alter_table('request_merge_bundle_deprecation', schema=None) as batch_op:
27 | batch_op.create_index(
28 | batch_op.f('ix_request_merge_bundle_deprecation_bundle_id'), ['bundle_id'], unique=False
29 | )
30 | batch_op.create_index(
31 | batch_op.f('ix_request_merge_bundle_deprecation_merge_index_image_id'),
32 | ['merge_index_image_id'],
33 | unique=False,
34 | )
35 |
36 |
37 | def downgrade():
38 | with op.batch_alter_table('request_merge_bundle_deprecation', schema=None) as batch_op:
39 | batch_op.drop_index(batch_op.f('ix_request_merge_bundle_deprecation_merge_index_image_id'))
40 | batch_op.drop_index(batch_op.f('ix_request_merge_bundle_deprecation_bundle_id'))
41 |
42 | op.rename_table('request_merge_bundle_deprecation', 'bundle_deprecation', schema=None)
43 |
44 | with op.batch_alter_table('bundle_deprecation', schema=None) as batch_op:
45 | batch_op.create_index(
46 | 'ix_bundle_deprecation_merge_index_image_id', ['merge_index_image_id'], unique=False
47 | )
48 | batch_op.create_index('ix_bundle_deprecation_bundle_id', ['bundle_id'], unique=False)
49 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/8d50f82f0be9_fbc_operations_api.py:
--------------------------------------------------------------------------------
1 | """
2 | Add fbc-operations api.
3 |
4 | Revision ID: 8d50f82f0be9
5 | Revises: a0eadb516360
6 | Create Date: 2023-01-04 10:39:49.366511
7 |
8 | """
9 | from alembic import op
10 | import sqlalchemy as sa
11 |
12 |
13 | # revision identifiers, used by Alembic.
14 | revision = '8d50f82f0be9'
15 | down_revision = 'a0eadb516360'
16 | branch_labels = None
17 | depends_on = None
18 |
19 |
20 | def upgrade():
21 | op.create_table(
22 | 'request_fbc_operations',
23 | sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
24 | sa.Column('fbc_fragment_id', sa.Integer(), nullable=True),
25 | sa.Column('fbc_fragment_resolved_id', sa.Integer(), nullable=True),
26 | sa.Column('binary_image_id', sa.Integer(), nullable=True),
27 | sa.Column('binary_image_resolved_id', sa.Integer(), nullable=True),
28 | sa.Column('from_index_id', sa.Integer(), nullable=True),
29 | sa.Column('from_index_resolved_id', sa.Integer(), nullable=True),
30 | sa.Column('index_image_id', sa.Integer(), nullable=True),
31 | sa.Column('index_image_resolved_id', sa.Integer(), nullable=True),
32 | sa.Column('internal_index_image_copy_id', sa.Integer(), nullable=True),
33 | sa.Column('internal_index_image_copy_resolved_id', sa.Integer(), nullable=True),
34 | sa.Column('distribution_scope', sa.String(), nullable=True),
35 | sa.ForeignKeyConstraint(
36 | ['binary_image_id'],
37 | ['image.id'],
38 | ),
39 | sa.ForeignKeyConstraint(
40 | ['binary_image_resolved_id'],
41 | ['image.id'],
42 | ),
43 | sa.ForeignKeyConstraint(
44 | ['fbc_fragment_id'],
45 | ['image.id'],
46 | ),
47 | sa.ForeignKeyConstraint(
48 | ['fbc_fragment_resolved_id'],
49 | ['image.id'],
50 | ),
51 | sa.ForeignKeyConstraint(
52 | ['from_index_id'],
53 | ['image.id'],
54 | ),
55 | sa.ForeignKeyConstraint(
56 | ['from_index_resolved_id'],
57 | ['image.id'],
58 | ),
59 | sa.ForeignKeyConstraint(
60 | ['id'],
61 | ['request.id'],
62 | ),
63 | sa.ForeignKeyConstraint(
64 | ['index_image_id'],
65 | ['image.id'],
66 | ),
67 | sa.ForeignKeyConstraint(
68 | ['index_image_resolved_id'],
69 | ['image.id'],
70 | ),
71 | sa.ForeignKeyConstraint(
72 | ['internal_index_image_copy_id'],
73 | ['image.id'],
74 | ),
75 | sa.ForeignKeyConstraint(
76 | ['internal_index_image_copy_resolved_id'],
77 | ['image.id'],
78 | ),
79 | sa.PrimaryKeyConstraint('id'),
80 | )
81 |
82 |
83 | def downgrade():
84 | op.drop_table('request_fbc_operations')
85 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/983a81fe5e98_added_distribution_scope_attribute_for_.py:
--------------------------------------------------------------------------------
1 | """Added distribution scope attribute for RequestMergeIndexImage.
2 |
3 | Revision ID: 983a81fe5e98
4 | Revises: 4c9db41195ec
5 | Create Date: 2020-10-08 13:25:25.662595
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | revision = '983a81fe5e98'
13 | down_revision = '4c9db41195ec'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
20 | batch_op.add_column(sa.Column('distribution_scope', sa.String(), nullable=True))
21 |
22 |
23 | def downgrade():
24 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
25 | batch_op.drop_column('distribution_scope')
26 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/9d60d35786c1_added_index_image_resolved.py:
--------------------------------------------------------------------------------
1 | """Added index_image_resolved.
2 |
3 | Revision ID: 9d60d35786c1
4 | Revises: 7573241a5156
5 | Create Date: 2021-02-11 15:48:27.192389
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | revision = '9d60d35786c1'
13 | down_revision = '7573241a5156'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | with op.batch_alter_table('request_add', schema=None) as batch_op:
20 | batch_op.add_column(sa.Column('index_image_resolved_id', sa.Integer(), nullable=True))
21 | batch_op.create_foreign_key(
22 | "index_image_resolved_id_fkey", 'image', ['index_image_resolved_id'], ['id']
23 | )
24 |
25 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
26 | batch_op.add_column(sa.Column('index_image_resolved_id', sa.Integer(), nullable=True))
27 | batch_op.create_foreign_key(
28 | "index_image_resolved_id_fkey", 'image', ['index_image_resolved_id'], ['id']
29 | )
30 |
31 |
32 | def downgrade():
33 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
34 | batch_op.drop_constraint("index_image_resolved_id_fkey", type_='foreignkey')
35 | batch_op.drop_column('index_image_resolved_id')
36 |
37 | with op.batch_alter_table('request_add', schema=None) as batch_op:
38 | batch_op.drop_constraint("index_image_resolved_id_fkey", type_='foreignkey')
39 | batch_op.drop_column('index_image_resolved_id')
40 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/9e9d4f9730c8_merge_graph_update.py:
--------------------------------------------------------------------------------
1 | """Adding graph_update_mode to merge index request.
2 |
3 | Revision ID: 9e9d4f9730c8
4 | Revises: 7346beaff092
5 | Create Date: 2023-10-17 11:11:10.558335
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '9e9d4f9730c8'
14 | down_revision = '7346beaff092'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('graph_update_mode', sa.String(), nullable=True))
23 |
24 | # ### end Alembic commands ###
25 |
26 |
27 | def downgrade():
28 | # ### commands auto generated by Alembic - please adjust! ###
29 | with op.batch_alter_table('request_merge_index_image', schema=None) as batch_op:
30 | batch_op.drop_column('graph_update_mode')
31 |
32 | # ### end Alembic commands ###
33 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/a0eadb516360_update_regenerate_bundle_request_.py:
--------------------------------------------------------------------------------
1 | """Update regenerate_bundle_request endpoint.
2 |
3 | Revision ID: a0eadb516360
4 | Revises: 625fba6081be
5 | Create Date: 2022-09-06 15:00:55.115536
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'a0eadb516360'
14 | down_revision = '625fba6081be'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('request_regenerate_bundle', schema=None) as batch_op:
21 | batch_op.add_column(sa.Column('bundle_replacements', sa.String(), nullable=True))
22 |
23 |
24 | def downgrade():
25 | with op.batch_alter_table('request_regenerate_bundle', schema=None) as batch_op:
26 | batch_op.drop_column('bundle_replacements')
27 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/bc29053265ba_add_distribution_scope.py:
--------------------------------------------------------------------------------
1 | """Add distribution scope.
2 |
3 | Revision ID: bc29053265ba
4 | Revises: 2ab3d4558cb6
5 | Create Date: 2020-09-20 02:26:45.531336
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'bc29053265ba'
14 | down_revision = '2ab3d4558cb6'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | with op.batch_alter_table('request_add', schema=None) as batch_op:
21 | batch_op.add_column(sa.Column('distribution_scope', sa.String(), nullable=True))
22 |
23 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
24 | batch_op.add_column(sa.Column('distribution_scope', sa.String(), nullable=True))
25 |
26 |
27 | def downgrade():
28 | with op.batch_alter_table('request_rm', schema=None) as batch_op:
29 | batch_op.drop_column('distribution_scope')
30 |
31 | with op.batch_alter_table('request_add', schema=None) as batch_op:
32 | batch_op.drop_column('distribution_scope')
33 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/daf67ddcf4a1_add_support_for_graph_update_mode_in_.py:
--------------------------------------------------------------------------------
1 | """Add support for graph_update_mode in Add endpoint.
2 |
3 | Revision ID: daf67ddcf4a1
4 | Revises: 8d50f82f0be9
5 | Create Date: 2023-07-27 15:37:59.568914
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | revision = 'daf67ddcf4a1'
13 | down_revision = '8d50f82f0be9'
14 | branch_labels = None
15 | depends_on = None
16 |
17 |
18 | def upgrade():
19 | with op.batch_alter_table('request_add', schema=None) as batch_op:
20 | batch_op.add_column(sa.Column('graph_update_mode', sa.String(), nullable=True))
21 |
22 |
23 | def downgrade():
24 | with op.batch_alter_table('request_add', schema=None) as batch_op:
25 | batch_op.drop_column('graph_update_mode')
26 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/e16a8cd2e028_add_create_empty_index.py:
--------------------------------------------------------------------------------
1 | """Add RequestCreateEmptyIndex model.
2 |
3 | Revision ID: e16a8cd2e028
4 | Revises: 9d60d35786c1
5 | Create Date: 2021-04-29 12:04:28.272171
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'e16a8cd2e028'
14 | down_revision = '9d60d35786c1'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | op.create_table(
21 | 'request_create_empty_index',
22 | sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
23 | sa.Column('from_index_id', sa.Integer(), nullable=False),
24 | sa.Column('binary_image_id', sa.Integer(), nullable=True),
25 | sa.Column('labels', sa.Text(), nullable=True),
26 | sa.Column('binary_image_resolved_id', sa.Integer(), nullable=True),
27 | sa.Column('from_index_resolved_id', sa.Integer(), nullable=True),
28 | sa.Column('index_image_id', sa.Integer(), nullable=True),
29 | sa.Column('index_image_resolved_id', sa.Integer(), nullable=True),
30 | sa.Column('distribution_scope', sa.String(), nullable=True),
31 | sa.ForeignKeyConstraint(
32 | ['binary_image_id'],
33 | ['image.id'],
34 | ),
35 | sa.ForeignKeyConstraint(
36 | ['binary_image_resolved_id'],
37 | ['image.id'],
38 | ),
39 | sa.ForeignKeyConstraint(
40 | ['from_index_id'],
41 | ['image.id'],
42 | ),
43 | sa.ForeignKeyConstraint(
44 | ['from_index_resolved_id'],
45 | ['image.id'],
46 | ),
47 | sa.ForeignKeyConstraint(
48 | ['id'],
49 | ['request.id'],
50 | ),
51 | sa.ForeignKeyConstraint(
52 | ['index_image_id'],
53 | ['image.id'],
54 | ),
55 | sa.ForeignKeyConstraint(
56 | ['index_image_resolved_id'],
57 | ['image.id'],
58 | ),
59 | sa.PrimaryKeyConstraint('id'),
60 | )
61 |
62 |
63 | def downgrade():
64 | op.drop_table('request_create_empty_index')
65 |
--------------------------------------------------------------------------------
/iib/web/migrations/versions/eec630370e68_support_deprecation_list_in_add_request_.py:
--------------------------------------------------------------------------------
1 | """Support deprecation_list in Add request type.
2 |
3 | Revision ID: eec630370e68
4 | Revises: 60f89c046096
5 | Create Date: 2021-01-20 20:36:29.184275
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'eec630370e68'
14 | down_revision = '60f89c046096'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | op.create_table(
21 | 'request_add_bundle_deprecation',
22 | sa.Column('request_add_id', sa.Integer(), autoincrement=False, nullable=False),
23 | sa.Column('bundle_id', sa.Integer(), autoincrement=False, nullable=False),
24 | sa.ForeignKeyConstraint(
25 | ['bundle_id'],
26 | ['image.id'],
27 | ),
28 | sa.ForeignKeyConstraint(
29 | ['request_add_id'],
30 | ['request_add.id'],
31 | ),
32 | sa.PrimaryKeyConstraint('request_add_id', 'bundle_id'),
33 | sa.UniqueConstraint(
34 | 'request_add_id', 'bundle_id', name='request_add_bundle_deprecation_constraint'
35 | ),
36 | )
37 | with op.batch_alter_table('request_add_bundle_deprecation', schema=None) as batch_op:
38 | batch_op.create_index(
39 | batch_op.f('ix_request_add_bundle_deprecation_bundle_id'), ['bundle_id'], unique=False
40 | )
41 | batch_op.create_index(
42 | batch_op.f('ix_request_add_bundle_deprecation_request_add_id'),
43 | ['request_add_id'],
44 | unique=False,
45 | )
46 |
47 |
48 | def downgrade():
49 | with op.batch_alter_table('request_add_bundle_deprecation', schema=None) as batch_op:
50 | batch_op.drop_index(batch_op.f('ix_request_add_bundle_deprecation_request_add_id'))
51 | batch_op.drop_index(batch_op.f('ix_request_add_bundle_deprecation_bundle_id'))
52 |
53 | op.drop_table('request_add_bundle_deprecation')
54 |
--------------------------------------------------------------------------------
/iib/web/s3_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import logging
3 | from typing import Optional
4 |
5 | import boto3
6 | from botocore.response import StreamingBody
7 |
8 | log = logging.getLogger(__name__)
9 |
10 |
11 | def get_object_from_s3_bucket(
12 | s3_key_prefix: str,
13 | s3_file_name: str,
14 | bucket_name: str,
15 | ) -> Optional[StreamingBody]:
16 | """
17 | Get object from AWS S3 bucket.
18 |
19 | :param str s3_key_prefix: the logical location of the file in the S3 bucket
20 | :param str s3_file_name: the name of the file in S3 bucket
21 | :param str bucket_name: the name of the S3 bucket to fetch the file from
22 | :return: the body of the S3 object or None
23 | :rtype: botocore.response.StreamingBody
24 | """
 25 |     file_name = f'{s3_key_prefix}/{s3_file_name}'
 26 |     log.info('Getting file from S3: %s', file_name)
 27 |     s3_client = boto3.client('s3')  # created outside the try block so finally can always close it
 28 |     try:
 29 |         response = s3_client.get_object(Bucket=bucket_name, Key=file_name)
 30 |         return response['Body']
 31 |     except Exception as error:
 32 |         log.exception('Unable to fetch object %s from bucket %s: %s', file_name, bucket_name, error)
 33 |         return None
 34 |     finally:
 35 |         s3_client.close()
36 |
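
A minimal usage sketch for the helper above, with hypothetical prefix, file, and bucket names (boto3 resolves credentials from its usual sources: environment variables, config files, or an instance role):

    from iib.web.s3_utils import get_object_from_s3_bucket

    body = get_object_from_s3_bucket('request_logs', '42.log', 'my-iib-bucket')
    if body is not None:
        print(body.read().decode('utf-8'))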
--------------------------------------------------------------------------------
/iib/web/static/docs.html:
--------------------------------------------------------------------------------
[HTML markup not captured in this dump; the only recoverable content is the page title, "IIB API Documentation".]
--------------------------------------------------------------------------------
/iib/web/utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from flask import request, url_for
3 | from flask_sqlalchemy.pagination import Pagination
4 | from typing import Optional
5 |
6 | from iib.web.iib_static_types import PaginationMetadata
7 |
8 |
9 | def pagination_metadata(pagination_query: Pagination, **kwargs) -> PaginationMetadata:
10 | """
11 | Return a dictionary containing metadata about the paginated query.
12 |
13 | This must be run as part of a Flask request.
14 |
 15 |     :param flask_sqlalchemy.pagination.Pagination pagination_query: the paginated query
16 | :param dict kwargs: the query parameters to add to the URLs
17 | :return: a dictionary containing metadata about the paginated query
18 | """
19 | pagination_data: PaginationMetadata = {
20 | 'first': url_for(
21 | str(request.endpoint),
22 | page=1,
23 | per_page=pagination_query.per_page,
24 | _external=True,
25 | **kwargs,
26 | ),
27 | 'last': url_for(
28 | str(request.endpoint),
29 | page=pagination_query.pages,
30 | per_page=pagination_query.per_page,
31 | _external=True,
32 | **kwargs,
33 | ),
34 | 'next': None,
35 | 'page': pagination_query.page,
36 | 'pages': pagination_query.pages,
37 | 'per_page': pagination_query.per_page,
38 | 'previous': None,
39 | 'total': pagination_query.total,
40 | }
41 |
42 | if pagination_query.has_prev:
43 | pagination_data['previous'] = url_for(
44 | str(request.endpoint),
45 | page=pagination_query.prev_num,
46 | per_page=pagination_query.per_page,
47 | _external=True,
48 | **kwargs,
49 | )
50 | if pagination_query.has_next:
51 | pagination_data['next'] = url_for(
52 | str(request.endpoint),
53 | page=pagination_query.next_num,
54 | per_page=pagination_query.per_page,
55 | _external=True,
56 | **kwargs,
57 | )
58 |
59 | return pagination_data
60 |
61 |
62 | def str_to_bool(item: Optional[str]) -> bool:
63 | """
64 | Convert a string to a boolean.
65 |
66 | :param str item: string to parse
67 | :return: a boolean equivalent
68 | :rtype: bool
69 | """
70 | if isinstance(item, str):
71 | return item.lower() in ('true', '1')
72 | else:
73 | return False
74 |
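
`str_to_bool` is intentionally strict; a short behavior sketch of the parsing rules documented above:

    from iib.web.utils import str_to_bool

    assert str_to_bool('True') is True
    assert str_to_bool('1') is True
    assert str_to_bool('yes') is False  # only 'true' and '1' (any case) are accepted
    assert str_to_bool(None) is False   # non-strings are always False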
--------------------------------------------------------------------------------
/iib/web/wsgi.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from iib.web.app import create_app
3 |
4 | app = create_app()
5 |
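
This module exists so an HTTP server can import the WSGI callable directly; assuming a gunicorn deployment (an illustrative choice, not mandated by the repository), the server would be pointed at `iib.web.wsgi:app`.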
--------------------------------------------------------------------------------
/iib/workers/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/iib/workers/api_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import json
3 | import logging
4 | from typing import Any, Dict, Optional
5 |
6 | import requests
7 | from urllib3.util.retry import Retry
8 | import requests_kerberos
9 | from tenacity import (
10 | before_sleep_log,
11 | retry,
12 | retry_if_exception_type,
13 | stop_after_attempt,
14 | wait_exponential,
15 | )
16 |
17 | from iib.exceptions import IIBError
18 | from iib.workers.config import get_worker_config
19 | from iib.workers.tasks.iib_static_types import UpdateRequestPayload
20 | import time
21 | from iib.common.tracing import instrument_tracing
22 |
23 | log = logging.getLogger(__name__)
24 | config = get_worker_config()
25 |
26 |
27 | def get_requests_session(auth: bool = False) -> requests.Session:
28 | """
29 | Create a requests session with authentication (when enabled).
30 |
31 | :param bool auth: configure authentication on the session
32 | :return: the configured requests session
33 | :rtype: requests.Session
34 | """
35 | session = requests.Session()
36 | if auth:
37 | session.auth = requests_kerberos.HTTPKerberosAuth(
38 | mutual_authentication=requests_kerberos.OPTIONAL
39 | )
 40 |     # named retry_strategy to avoid shadowing the tenacity `retry` import
 41 |     retry_strategy = Retry(
 42 |         total=3, read=3, connect=3, backoff_factor=3, status_forcelist=(408, 500, 502, 503, 504)
 43 |     )
 44 |     adapter = requests.adapters.HTTPAdapter(max_retries=retry_strategy)
44 | session.mount('http://', adapter)
45 | session.mount('https://', adapter)
46 | return session
47 |
48 |
49 | def get_request(request_id: int) -> Dict[str, Any]:
50 | """
51 | Get the IIB build request from the REST API.
52 |
53 | :param int request_id: the ID of the IIB request
54 | :return: the request
55 | :rtype: dict
56 | :raises IIBError: if the HTTP request fails
57 | """
58 | request_url = f'{config.iib_api_url.rstrip("/")}/builds/{request_id}'
59 | log.info('Getting the request %d', request_id)
60 |
61 | try:
62 | rv = requests_session.get(request_url, timeout=config.iib_api_timeout)
63 | except requests.RequestException:
64 | msg = f'The connection failed when getting the request {request_id}'
65 | log.exception(msg)
66 | raise IIBError(msg)
67 |
68 | if not rv.ok:
69 | log.error(
70 | 'The worker failed to get the request %d. The status was %d. The text was:\n%s',
71 | request_id,
72 | rv.status_code,
73 | rv.text,
74 | )
75 | raise IIBError(f'The worker failed to get the request {request_id}')
76 |
77 | return rv.json()
78 |
79 |
80 | @instrument_tracing(span_name="workers.api_utils.set_request_state")
81 | def set_request_state(request_id: int, state: str, state_reason: str) -> Dict[str, Any]:
82 | """
83 | Set the state of the request using the IIB API.
84 |
85 | :param int request_id: the ID of the IIB request
86 | :param str state: the state to set the IIB request to
87 | :param str state_reason: the state reason to set the IIB request to
88 | :return: the updated request
89 | :rtype: dict
90 | :raise IIBError: if the request to the IIB API fails
91 | """
92 | log.info(
93 | 'Setting the state of request %d to "%s" with the reason "%s"',
94 | request_id,
95 | state,
96 | state_reason,
97 | )
98 | payload: UpdateRequestPayload = {'state': state, 'state_reason': state_reason}
99 | exc_msg = 'Setting the state to "{state}" on request {request_id} failed'
100 | return update_request(request_id, payload, exc_msg=exc_msg)
101 |
102 |
103 | def set_omps_operator_version(
104 | request_id: int,
105 | omps_operator_version: Dict[str, str],
106 | ) -> Dict[str, Any]:
107 | """
108 |     Set the omps_operator_version of the request using the IIB API.
109 |
110 |     :param int request_id: the ID of the IIB request
111 |     :param dict omps_operator_version: the operator package to version mapping to set on the request
112 | :return: the updated request
113 | :rtype: dict
114 | :raise IIBError: if the request to the IIB API fails
115 | """
116 | omps_operator_version_json = json.dumps(omps_operator_version)
117 | log.info(
118 | 'Setting the omps_operator_version of request %d to "%s"',
119 | request_id,
120 | omps_operator_version_json,
121 | )
122 | payload: UpdateRequestPayload = {'omps_operator_version': omps_operator_version_json}
123 | exc_msg = 'Setting the omps_operator_version to "{omps_operator_version}" failed'
124 |
125 | return update_request(request_id, payload, exc_msg=exc_msg)
126 |
127 |
128 | @retry(
129 | before_sleep=before_sleep_log(log, logging.WARNING),
130 | reraise=True,
131 | retry=retry_if_exception_type(IIBError),
132 | stop=stop_after_attempt(config.iib_total_attempts),
133 | wait=wait_exponential(config.iib_retry_multiplier),
134 | )
135 | def update_request(
136 | request_id: int,
137 | payload: UpdateRequestPayload,
138 | exc_msg: Optional[str] = None,
139 | ) -> Dict[str, Any]:
140 | """
141 | Update the IIB build request.
142 |
143 | :param int request_id: the ID of the IIB request
144 | :param dict payload: the payload to send to the PATCH API endpoint
145 | :param str exc_msg: an optional custom exception that can be a template
146 | :return: the updated request
147 | :rtype: dict
148 | :raises IIBError: if the request to the IIB API fails
149 | """
150 |     # Record the start time so the total request-update duration can be logged below
151 |     start_time = time.time()
152 | request_url = f'{config.iib_api_url.rstrip("/")}/builds/{request_id}'
153 | log.info('Patching the request %d with %r', request_id, payload)
154 |
155 | try:
156 | patch_start_time = time.time()
157 | rv = requests_auth_session.patch(request_url, json=payload, timeout=config.iib_api_timeout)
158 | log.debug(f"Update_request patch duration: {time.time() - patch_start_time}")
159 | except requests.RequestException:
160 | msg = f'The connection failed when updating the request {request_id}'
161 | log.exception(msg)
162 | raise IIBError(msg)
163 |
164 | if not rv.ok:
165 | log.error(
166 | 'The worker failed to update the request %d. The status was %d. The text was:\n%s',
167 | request_id,
168 | rv.status_code,
169 | rv.text,
170 | )
171 | if exc_msg:
172 | _exc_msg = exc_msg.format(**payload, request_id=request_id)
173 | else:
174 | _exc_msg = f'The worker failed to update the request {request_id}'
175 | raise IIBError(_exc_msg)
176 |
177 | log.debug(f"Update_request duration: {time.time() - start_time}")
178 | return rv.json()
179 |
180 |
181 | requests_auth_session = get_requests_session(auth=True)
182 | requests_session = get_requests_session()
183 |
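
A usage sketch for the helpers above, with a hypothetical request ID; a reachable IIB API and a populated worker configuration are assumed. `set_request_state` delegates to `update_request`, which retries a failed PATCH up to `config.iib_total_attempts` times with exponential backoff before letting the `IIBError` propagate:

    from iib.workers.api_utils import get_request, set_request_state

    request = get_request(42)
    if request['state'] == 'in_progress':
        set_request_state(42, 'failed', 'The build was cancelled by an operator')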
--------------------------------------------------------------------------------
/iib/workers/dogpile_cache.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import functools
3 | import hashlib
4 | from typing import Callable
5 |
6 | from dogpile.cache import make_region
7 | from dogpile.cache.region import CacheRegion
8 |
9 | from iib.workers.config import get_worker_config
10 |
11 |
12 | def skopeo_inspect_should_use_cache(*args, **kwargs) -> bool:
13 |     """Return True if this request can be served from or stored in the cache."""
14 | return any(arg.find('@sha256:') != -1 for arg in args)
15 |
16 |
17 | def dogpile_cache(dogpile_region: CacheRegion, should_use_cache_fn: Callable) -> Callable:
18 | """
19 | Dogpile cache decorator.
20 |
21 |     :param dogpile_region: Dogpile CacheRegion object
22 |     :param should_use_cache_fn: function which determines whether the cache should be used
23 | """
24 |
25 | def cache_decorator(func):
26 | @functools.wraps(func)
27 | def inner(*args, **kwargs):
28 | should_cache = should_use_cache_fn(*args, **kwargs)
29 | cache_key = generate_cache_key(func.__name__, *args, **kwargs)
30 |
31 | if should_cache:
32 | # get data from cache
33 | output_cache = dogpile_region.get(cache_key)
34 | if output_cache:
35 | return output_cache
36 |
37 | output = func(*args, **kwargs)
38 |
39 | if should_cache:
40 | dogpile_region.set(cache_key, output)
41 |
42 | return output
43 |
44 | return inner
45 |
46 | return cache_decorator
47 |
48 |
49 | def generate_cache_key(fn: str, *args, **kwargs) -> str:
50 | """Generate key that is used in dogpile cache."""
51 | arguments = '|'.join(
52 | [str(arg) for arg in args] + [f'{kwarg}={kwargs[kwarg]}' for kwarg in kwargs]
53 | )
54 | key_str = f'{fn}|{arguments}'
55 |     # hashlib.sha256() accepts only bytes, so encode the key string
56 |     # before hashing; the fixed-length hex digest is safe to use as a
57 |     # cache key with any dogpile backend.
58 |     key = hashlib.sha256(key_str.encode('utf-8')).hexdigest()
59 |
60 |     return key
61 |
62 |
63 | def create_dogpile_region() -> CacheRegion:
64 | """Create and configure a dogpile region."""
65 | conf = get_worker_config()
66 |
67 | return make_region().configure(
68 | conf.iib_dogpile_backend,
69 | expiration_time=conf.iib_dogpile_expiration_time,
70 | arguments=conf.iib_dogpile_arguments,
71 | )
72 |
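
A sketch of how these pieces combine, with a hypothetical inspect function; the worker config is assumed to provide the dogpile backend settings. Results are cached only for digest-pinned pull specs, since a digest is immutable while a tag can move:

    from iib.workers.dogpile_cache import (
        create_dogpile_region,
        dogpile_cache,
        skopeo_inspect_should_use_cache,
    )

    region = create_dogpile_region()


    @dogpile_cache(dogpile_region=region, should_use_cache_fn=skopeo_inspect_should_use_cache)
    def inspect(pull_spec: str) -> str:
        # Hypothetical stand-in for running `skopeo inspect` and parsing its output.
        return f'inspected {pull_spec}'


    inspect('docker://registry.example.com/ns/index@sha256:abc123')  # cached
    inspect('docker://registry.example.com/ns/index:latest')  # never cached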
--------------------------------------------------------------------------------
/iib/workers/greenwave.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from copy import deepcopy
3 | import json
4 | import logging
5 | from typing import List
6 |
7 | import requests
8 | from celery.app.utils import Settings
9 |
10 | from iib.exceptions import IIBError
11 | from iib.workers.config import get_worker_config
12 | from iib.workers.tasks.utils import get_image_labels
13 | from iib.workers.tasks.iib_static_types import GreenwaveConfig
14 |
15 | log = logging.getLogger(__name__)
16 |
17 |
18 | def gate_bundles(bundles: List[str], greenwave_config: GreenwaveConfig) -> None:
19 | """
20 | Check if all bundle images have passed gating tests in the CVP pipeline.
21 |
22 | This function queries Greenwave to check if the policies are satisfied for each bundle image.
23 |
24 | :param list bundles: a list of strings representing the pull specifications of the bundles to
25 | be gated.
26 | :param dict greenwave_config: the dict of config required to query Greenwave to gate bundles.
27 | :raises IIBError: if any of the bundles fail the gating checks or IIB fails to get a
28 | response from Greenwave.
29 | """
30 | conf = get_worker_config()
31 | _validate_greenwave_params_and_config(conf, greenwave_config)
32 |
33 | log.info('Gating on bundles: %s', ', '.join(bundles))
34 | gating_unsatisfied_bundles = []
35 | testcases = []
36 | for bundle in bundles:
37 | koji_build_nvr = _get_koji_build_nvr(bundle)
38 | log.debug('Querying Greenwave for decision on %s', koji_build_nvr)
39 | payload = dict(deepcopy(greenwave_config))
40 | payload['subject_identifier'] = koji_build_nvr
41 | log.debug(
42 | 'Querying Greenwave with decision_context: %s, product_version: %s, '
43 | 'subject_identifier: %s and subject_type: %s',
44 | payload["decision_context"],
45 | payload["product_version"],
46 | payload["subject_identifier"],
47 | payload["subject_type"],
48 | )
49 |
50 | request_url = f'{conf["iib_greenwave_url"].rstrip("/")}/decision'
51 | resp = requests.post(request_url, json=payload, timeout=30)
52 | try:
53 | data = resp.json()
54 | except json.JSONDecodeError:
55 | log.error('Error encountered in decoding JSON %s', resp.text)
56 | data = {}
57 |
58 | if not resp.ok:
59 | error_msg = data.get('message') or resp.text
60 | log.error('Request to Greenwave failed: %s', error_msg)
61 | raise IIBError(f'Gating check failed for {bundle}: {error_msg}')
62 |
63 | try:
64 | if not data['policies_satisfied']:
65 | log.info('Gating decision for %s: %s', bundle, data)
66 | gating_unsatisfied_bundles.append(bundle)
67 | testcases = [item['testcase'] for item in data.get('unsatisfied_requirements', [])]
68 |
69 | except KeyError:
70 | log.error('Missing key "policies_satisfied" for %s: %s', bundle, data)
71 | raise IIBError(f'Key "policies_satisfied" missing in Greenwave response for {bundle}')
72 |
73 | if gating_unsatisfied_bundles:
74 | error_msg = (
75 | f'Unsatisfied Greenwave policy for {", ".join(gating_unsatisfied_bundles)} '
76 | f'with decision_context: {greenwave_config["decision_context"]}, '
77 | f'product_version: {greenwave_config["product_version"]}, '
78 | f'subject_type: {greenwave_config["subject_type"]} '
79 | f'and test cases: {", ".join(testcases)}'
80 | )
81 | raise IIBError(error_msg)
82 |
83 |
84 | def _get_koji_build_nvr(bundle: str) -> str:
85 | """
86 | Get the Koji build NVR of the bundle from its labels.
87 |
88 | :param str bundle: the pull specification of the bundle image to be gated.
89 | :return: the Koji build NVR of the bundle image.
90 | :rtype: str
91 | """
92 | labels = get_image_labels(bundle)
93 | return '{}-{}-{}'.format(labels['com.redhat.component'], labels['version'], labels['release'])
94 |
95 |
96 | def _validate_greenwave_params_and_config(
97 | conf: Settings,
98 | greenwave_config: GreenwaveConfig,
99 | ) -> None:
100 | """
101 | Validate payload parameters and config variables required for gating bundles.
102 |
103 | :param dict conf: the IIB worker configuration.
104 | :param dict greenwave_config: the dict of config required to query Greenwave to gate bundles.
105 | :raises IIBError: if IIB is not configured to handle gating of bundles.
106 | """
107 | if not conf.get('iib_greenwave_url'):
108 | log.error('iib_greenwave_url not set in the Celery config')
109 | raise IIBError('IIB is not configured to handle gating of bundles')
110 |
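
A worked example of the NVR construction performed by `_get_koji_build_nvr`, with hypothetical label values:

    labels = {
        'com.redhat.component': 'my-operator-bundle-container',
        'version': 'v1.2.3',
        'release': '5',
    }
    nvr = '{}-{}-{}'.format(labels['com.redhat.component'], labels['version'], labels['release'])
    assert nvr == 'my-operator-bundle-container-v1.2.3-5'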
--------------------------------------------------------------------------------
/iib/workers/s3_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import logging
3 |
4 | from botocore.exceptions import ClientError
5 | import boto3
6 |
7 | from iib.exceptions import IIBError
8 | from iib.workers.config import get_worker_config
9 |
10 | log = logging.getLogger(__name__)
11 |
12 |
13 | def upload_file_to_s3_bucket(file_path: str, s3_key_prefix: str, s3_file_name: str) -> None:
14 | """
15 | Upload artifact file to AWS S3 bucket.
16 |
17 | :param str file_path: the path of the file locally where the artifact file is stored
18 | :param str s3_key_prefix: the logical location of the file in the S3 bucket
19 | :param str s3_file_name: the name of the file in S3 bucket
20 | :raises IIBError: when unable to upload file to the S3 bucket
21 | """
22 | conf = get_worker_config()
23 | log.info(
24 | 'Uploading file %s/%s to S3 bucket: %s',
25 | s3_key_prefix,
26 | s3_file_name,
27 | conf['iib_aws_s3_bucket_name'],
28 | )
29 | s3 = boto3.resource(service_name='s3')
30 | try:
31 | s3.meta.client.upload_file(
32 | Filename=file_path,
33 | Bucket=conf['iib_aws_s3_bucket_name'],
34 | Key=f'{s3_key_prefix}/{s3_file_name}',
35 | )
36 | except ClientError as error:
37 | log.exception(error)
38 | raise IIBError(
39 | f'Unable to upload file {file_path} to bucket {conf["iib_aws_s3_bucket_name"]}: {error}'
40 | )
41 |
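
A usage sketch with hypothetical paths; the destination bucket comes from the `iib_aws_s3_bucket_name` worker-config key, so only the local path and the two S3 key components are passed in:

    from iib.workers.s3_utils import upload_file_to_s3_bucket

    upload_file_to_s3_bucket('/var/log/iib/42.log', 'request_logs', '42.log')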
--------------------------------------------------------------------------------
/iib/workers/tasks/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/iib/workers/tasks/build_create_empty_index.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import logging
3 | import tempfile
4 | from typing import Dict, Optional
5 |
6 | from iib.common.tracing import instrument_tracing
7 | from iib.exceptions import IIBError
8 | from iib.workers.api_utils import set_request_state
9 | from iib.common.common_utils import get_binary_versions
10 | from iib.workers.tasks.build import (
11 | _add_label_to_index,
12 | _build_image,
13 | _cleanup,
14 | _create_and_push_manifest_list,
15 | _push_image,
16 | _update_index_image_build_state,
17 | _update_index_image_pull_spec,
18 | )
19 | from iib.workers.tasks.celery import app
20 | from iib.workers.tasks.fbc_utils import is_image_fbc
21 | from iib.workers.tasks.opm_operations import (
22 | opm_create_empty_fbc,
23 | opm_index_rm,
24 | Opm,
25 | get_operator_package_list,
26 | )
27 | from iib.workers.tasks.utils import (
28 | request_logger,
29 | prepare_request_for_build,
30 | RequestConfigCreateIndexImage,
31 | )
32 | from iib.workers.tasks.iib_static_types import PrebuildInfo
33 |
34 | __all__ = ['handle_create_empty_index_request']
35 |
36 | log = logging.getLogger(__name__)
37 |
38 |
39 | @app.task
40 | @request_logger
41 | @instrument_tracing(
42 | span_name="workers.tasks.handle_create_empty_index_request", attributes=get_binary_versions()
43 | )
44 | def handle_create_empty_index_request(
45 | from_index: str,
46 | request_id: int,
47 | output_fbc: bool = False,
48 | binary_image: Optional[str] = None,
49 | labels: Optional[Dict[str, str]] = None,
50 | binary_image_config: Optional[Dict[str, Dict[str, str]]] = None,
51 | ) -> None:
 52 |     """Coordinate the work needed to create the index image with labels.
53 |
54 | :param str from_index: the pull specification of the container image containing the index that
55 | the index image build will be based from.
56 | :param int request_id: the ID of the IIB build request
57 | :param bool output_fbc: specifies whether a File-based Catalog index image should be created
58 | :param str binary_image: the pull specification of the container image where the opm binary
59 | gets copied from.
60 | :param dict labels: the dict of labels required to be added to a new index image
61 | :param dict binary_image_config: the dict of config required to identify the appropriate
62 | ``binary_image`` to use.
63 | """
64 | _cleanup()
65 | prebuild_info: PrebuildInfo = prepare_request_for_build(
66 | request_id,
67 | RequestConfigCreateIndexImage(
68 | _binary_image=binary_image,
69 | from_index=from_index,
70 | binary_image_config=binary_image_config,
71 | ),
72 | )
73 | from_index_resolved = prebuild_info['from_index_resolved']
74 | prebuild_info['labels'] = labels
75 | Opm.set_opm_version(from_index_resolved)
76 |
77 | if not output_fbc and is_image_fbc(from_index_resolved):
78 | log.debug('%s is FBC index image', from_index_resolved)
79 | err_msg = 'Cannot create SQLite index image from File-Based Catalog index image'
80 | log.error(err_msg)
81 | raise IIBError(err_msg)
82 |
83 | _update_index_image_build_state(request_id, prebuild_info)
84 |
85 | with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir:
86 | set_request_state(request_id, 'in_progress', 'Checking operators present in index image')
87 |
88 | operators = get_operator_package_list(from_index_resolved, temp_dir)
89 |
90 | # if output_fbc parameter is true, create an empty FBC index image
91 | # else create empty SQLite index image
92 | if output_fbc:
93 | log.debug('Creating empty FBC index image from %s', from_index)
94 | opm_create_empty_fbc(
95 | request_id=request_id,
96 | temp_dir=temp_dir,
97 | from_index_resolved=from_index_resolved,
98 | from_index=from_index,
99 | binary_image=prebuild_info['binary_image'],
100 | operators=operators,
101 | )
102 | else:
103 | set_request_state(request_id, 'in_progress', 'Removing operators from index image')
104 | opm_index_rm(
105 | temp_dir,
106 | operators,
107 | prebuild_info['binary_image'],
108 | from_index_resolved,
109 | container_tool='podman',
110 | )
111 |
112 | set_request_state(
113 | request_id, 'in_progress', 'Getting and updating labels for new index image'
114 | )
115 |
116 | iib_labels = {
117 | 'com.redhat.index.delivery.version': prebuild_info['ocp_version'],
118 | 'com.redhat.index.delivery.distribution_scope': prebuild_info['distribution_scope'],
119 | }
120 |
121 | if labels:
122 | iib_labels.update(labels)
123 | for index_label, value in iib_labels.items():
124 | _add_label_to_index(index_label, value, temp_dir, 'index.Dockerfile')
125 |
126 | arches = prebuild_info['arches']
127 |
128 | for arch in sorted(arches):
129 | _build_image(temp_dir, 'index.Dockerfile', request_id, arch)
130 | _push_image(request_id, arch)
131 |
132 | set_request_state(request_id, 'in_progress', 'Creating the manifest list')
133 | output_pull_spec = _create_and_push_manifest_list(request_id, arches, [])
134 |
135 | _update_index_image_pull_spec(
136 | output_pull_spec=output_pull_spec,
137 | request_id=request_id,
138 | arches=arches,
139 | from_index=from_index,
140 | resolved_prebuild_from_index=from_index_resolved,
141 | )
142 | _cleanup()
143 | set_request_state(request_id, 'complete', 'The empty index image was successfully created')
144 |
--------------------------------------------------------------------------------
/iib/workers/tasks/build_fbc_operations.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import logging
3 | import tempfile
4 | from typing import Dict, Optional, Set
5 |
6 | from iib.common.common_utils import get_binary_versions
7 | from iib.common.tracing import instrument_tracing
8 | from iib.workers.api_utils import set_request_state
9 | from iib.workers.tasks.build import (
10 | _add_label_to_index,
11 | _build_image,
12 | _cleanup,
13 | _create_and_push_manifest_list,
14 | _push_image,
15 | _update_index_image_build_state,
16 | _update_index_image_pull_spec,
17 | )
18 | from iib.workers.tasks.celery import app
19 | from iib.workers.tasks.opm_operations import opm_registry_add_fbc_fragment, Opm
20 | from iib.workers.tasks.utils import (
21 | get_resolved_image,
22 | prepare_request_for_build,
23 | request_logger,
24 | set_registry_token,
25 | RequestConfigFBCOperation,
26 | )
27 |
28 | __all__ = ['handle_fbc_operation_request']
29 |
30 | log = logging.getLogger(__name__)
31 |
32 |
33 | @app.task
34 | @request_logger
35 | @instrument_tracing(
36 | span_name="workers.tasks.build.handle_fbc_operation_request", attributes=get_binary_versions()
37 | )
38 | def handle_fbc_operation_request(
39 | request_id: int,
40 | fbc_fragment: str,
41 | from_index: Optional[str] = None,
42 | binary_image: Optional[str] = None,
43 | distribution_scope: Optional[str] = None,
44 | overwrite_from_index: bool = False,
45 | overwrite_from_index_token: Optional[str] = None,
46 | build_tags: Optional[Set[str]] = None,
47 | add_arches: Optional[Set[str]] = None,
48 | binary_image_config: Optional[Dict[str, Dict[str, str]]] = None,
49 | index_to_gitlab_push_map: Optional[Dict[str, str]] = None,
50 | ) -> None:
51 | """
 52 |     Add an FBC fragment to an FBC index image.
 53 |
 54 |     :param str fbc_fragment: the pull specification of the FBC fragment to add to the index image
55 | :param int request_id: the ID of the IIB build request
56 | :param str binary_image: the pull specification of the container image where the opm binary
57 | gets copied from.
58 | :param str from_index: the pull specification of the container image containing the index that
59 | the index image build will be based from.
60 | :param set add_arches: the set of arches to build in addition to the arches ``from_index`` is
61 | currently built for; if ``from_index`` is ``None``, then this is used as the list of arches
62 | to build the index image for
63 | :param dict index_to_gitlab_push_map: the dict mapping index images (keys) to GitLab repos
64 | (values) in order to push their catalogs into GitLab.
65 | """
66 | _cleanup()
67 | set_request_state(request_id, 'in_progress', 'Resolving the fbc fragment')
68 |
69 | with set_registry_token(overwrite_from_index_token, fbc_fragment, append=True):
70 | resolved_fbc_fragment = get_resolved_image(fbc_fragment)
71 |
72 | prebuild_info = prepare_request_for_build(
73 | request_id,
74 | RequestConfigFBCOperation(
75 | _binary_image=binary_image,
76 | from_index=from_index,
77 | overwrite_from_index_token=overwrite_from_index_token,
78 | add_arches=add_arches,
79 | fbc_fragment=fbc_fragment,
80 | distribution_scope=distribution_scope,
81 | binary_image_config=binary_image_config,
82 | ),
83 | )
84 |
85 | from_index_resolved = prebuild_info['from_index_resolved']
86 | binary_image_resolved = prebuild_info['binary_image_resolved']
87 | Opm.set_opm_version(from_index_resolved)
88 |
89 | prebuild_info['fbc_fragment_resolved'] = resolved_fbc_fragment
90 |
91 | _update_index_image_build_state(request_id, prebuild_info)
92 |
93 | with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir:
94 | opm_registry_add_fbc_fragment(
95 | request_id,
96 | temp_dir,
97 | from_index_resolved,
98 | binary_image_resolved,
99 | resolved_fbc_fragment,
100 | overwrite_from_index_token,
101 | )
102 |
103 | _add_label_to_index(
104 | 'com.redhat.index.delivery.version',
105 | prebuild_info['ocp_version'],
106 | temp_dir,
107 | 'index.Dockerfile',
108 | )
109 |
110 | _add_label_to_index(
111 | 'com.redhat.index.delivery.distribution_scope',
112 | prebuild_info['distribution_scope'],
113 | temp_dir,
114 | 'index.Dockerfile',
115 | )
116 |
117 | arches = prebuild_info['arches']
118 | for arch in sorted(arches):
119 | _build_image(temp_dir, 'index.Dockerfile', request_id, arch)
120 | _push_image(request_id, arch)
121 |
122 | set_request_state(request_id, 'in_progress', 'Creating the manifest list')
123 | output_pull_spec = _create_and_push_manifest_list(request_id, arches, build_tags)
124 |
125 | _update_index_image_pull_spec(
126 | output_pull_spec,
127 | request_id,
128 | arches,
129 | from_index,
130 | overwrite_from_index,
131 | overwrite_from_index_token,
132 | from_index_resolved,
133 | add_or_rm=True,
134 | )
135 | _cleanup()
136 | set_request_state(
137 | request_id, 'complete', 'The FBC fragment was successfully added in the index image'
138 | )
139 |
--------------------------------------------------------------------------------
/iib/workers/tasks/build_recursive_related_bundles.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import copy
3 | import logging
4 | import os
5 | import tempfile
6 | from typing import Any, Dict, List, Optional
7 |
8 | from operator_manifest.operator import OperatorManifest
9 | import ruamel.yaml
10 |
11 | from iib.common.common_utils import get_binary_versions
12 | from iib.common.tracing import instrument_tracing
13 | from iib.exceptions import IIBError
14 | from iib.workers.api_utils import set_request_state, update_request
15 | from iib.workers.tasks.build import (
16 | _cleanup,
17 | _copy_files_from_image,
18 | )
19 | from iib.workers.tasks.build_regenerate_bundle import (
20 | _adjust_operator_bundle,
21 | get_related_bundle_images,
22 | write_related_bundles_file,
23 | )
24 | from iib.workers.config import get_worker_config
25 | from iib.workers.tasks.celery import app
26 | from iib.workers.tasks.utils import (
27 | get_resolved_image,
28 | podman_pull,
29 | request_logger,
30 | set_registry_auths,
31 | get_bundle_metadata,
32 | )
33 | from iib.workers.tasks.iib_static_types import UpdateRequestPayload
34 |
35 |
36 | __all__ = ['handle_recursive_related_bundles_request']
37 |
38 | yaml = ruamel.yaml.YAML()
39 | # IMPORTANT: ruamel will introduce a line break if the yaml line is longer than yaml.width.
40 | # Unfortunately, this causes issues for JSON values nested within a YAML file, e.g.
41 | # metadata.annotations."alm-examples" in a CSV file.
42 | # The default value is 80. Set it to a more forgiving higher number to avoid issues
43 | yaml.width = 200
44 | # ruamel will also cause issues when normalizing a YAML object that contains
45 | # a nested JSON object when it does not preserve quotes. Thus, it produces
46 | # invalid YAML. Let's prevent this from happening at all.
47 | yaml.preserve_quotes = True
48 | log = logging.getLogger(__name__)
49 |
50 |
51 | @app.task
52 | @request_logger
53 | @instrument_tracing(
54 | span_name="workers.tasks.build.handle_recursive_related_bundles_request",
55 | attributes=get_binary_versions(),
56 | )
57 | def handle_recursive_related_bundles_request(
58 | parent_bundle_image: str,
59 | organization: str,
60 | request_id: int,
61 | registry_auths: Optional[Dict[str, Any]] = None,
62 | ) -> None:
63 | """
64 | Coordinate the work needed to find recursive related bundles of the operator bundle image.
65 |
 66 |     :param str parent_bundle_image: the pull specification of the bundle image whose related
 67 |         bundles are to be found.
68 | :param str organization: the name of the organization the bundle should be regenerated for.
69 | :param int request_id: the ID of the IIB build request
70 | :param dict registry_auths: Provide the dockerconfig.json for authentication to private
71 | registries, defaults to ``None``.
72 | :raises IIBError: if the recursive related bundles build fails.
73 | """
74 | _cleanup()
75 |
76 | set_request_state(request_id, 'in_progress', 'Resolving parent_bundle_image')
77 |
78 | with set_registry_auths(registry_auths):
79 | parent_bundle_image_resolved = get_resolved_image(parent_bundle_image)
80 |
81 | payload: UpdateRequestPayload = {
82 | 'parent_bundle_image_resolved': parent_bundle_image_resolved,
83 | 'state': 'in_progress',
84 | 'state_reason': (
85 | f'Finding recursive related bundles for the bundle: {parent_bundle_image}'
86 | ),
87 | }
88 | update_request(request_id, payload)
89 |
90 | recursive_related_bundles = [parent_bundle_image_resolved]
91 | current_level_related_bundles = [parent_bundle_image_resolved]
92 | total_related_bundles = 0
93 | conf = get_worker_config()
94 | traversal_completed = False
95 | while not traversal_completed:
96 | temp_current_level_related_bundles = copy.deepcopy(current_level_related_bundles)
97 | current_level_related_bundles = []
98 | for bundle in temp_current_level_related_bundles:
99 | children_related_bundles = process_parent_bundle_image(
100 | bundle, request_id, organization
101 | )
102 | current_level_related_bundles.extend(children_related_bundles)
103 |
104 | total_related_bundles += len(children_related_bundles)
105 | if total_related_bundles >= conf['iib_max_recursive_related_bundles']:
106 | raise IIBError('Max number of related bundles exceeded. Potential DOS attack!')
107 |
108 | recursive_related_bundles.extend(current_level_related_bundles)
109 | if not current_level_related_bundles:
110 | traversal_completed = True
111 |
112 | payload = {
113 | 'state': 'in_progress',
114 | 'state_reason': 'Writing recursive related bundles to a file',
115 | }
116 |     update_request(request_id, payload, exc_msg='Failed updating the state of the request')
117 | # Reverse the list while writing because we did a top to bottom level traversal of a tree.
118 | # The return value should be a bottom to top level traversal.
119 | write_related_bundles_file(
120 | recursive_related_bundles[::-1],
121 | request_id,
122 | conf['iib_request_recursive_related_bundles_dir'],
123 | 'recursive_related_bundles',
124 | )
125 |
126 | payload = {
127 | 'state': 'complete',
128 | 'state_reason': 'The request completed successfully',
129 | }
130 | _cleanup()
131 |     update_request(request_id, payload, exc_msg='Failed marking the request as complete')
132 |
133 |
134 | def process_parent_bundle_image(
135 | bundle_image_resolved: str, request_id: int, organization: Optional[str] = None
136 | ) -> List[str]:
137 | """
138 | Apply required customization and get children bundles (aka related bundles) for a bundle image.
139 |
140 |     :param str bundle_image_resolved: the pull specification of the bundle image whose child
141 |         bundles are to be found.
142 | :param int request_id: the ID of the IIB build request
143 |     :param str organization: the name of the organization to apply customizations for.
144 | :rtype: list
145 | :return: the list of all children bundles for a parent bundle image
146 | :raises IIBError: if fails to process the parent bundle image.
147 | """
148 | # Pull the bundle_image to ensure steps later on don't fail due to registry timeouts
149 | podman_pull(bundle_image_resolved)
150 |
151 | with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir:
152 | manifests_path = os.path.join(temp_dir, 'manifests')
153 | _copy_files_from_image(bundle_image_resolved, '/manifests', manifests_path)
154 | metadata_path = os.path.join(temp_dir, 'metadata')
155 | _copy_files_from_image(bundle_image_resolved, '/metadata', metadata_path)
156 | if organization:
157 | _adjust_operator_bundle(
158 | manifests_path,
159 | metadata_path,
160 | request_id,
161 | organization,
162 | recursive_related_bundles=True,
163 | )
164 |
165 | try:
166 | operator_manifest = OperatorManifest.from_directory(manifests_path)
167 | except (ruamel.yaml.YAMLError, ruamel.yaml.constructor.DuplicateKeyError) as e:
168 | error = f'The Operator Manifest is not in a valid YAML format: {e}'
169 | log.exception(error)
170 | raise IIBError(error)
171 |
172 | bundle_metadata = get_bundle_metadata(operator_manifest, pinned_by_iib=False)
173 | return get_related_bundle_images(bundle_metadata)
174 |
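
The handler above is a breadth-first, level-by-level walk of the bundle tree with a hard cap on the total number of discovered bundles. A standalone sketch of the same traversal shape, stripped of the IIB-specific calls (all names here are illustrative):

    from typing import Callable, List


    def walk_levels(root: str, children_of: Callable[[str], List[str]], cap: int) -> List[str]:
        visited = [root]
        level = [root]
        total = 0
        while level:
            next_level: List[str] = []
            for node in level:
                children = children_of(node)
                next_level.extend(children)
                total += len(children)
                if total >= cap:
                    raise RuntimeError('Max number of related bundles exceeded')
            visited.extend(next_level)
            level = next_level
        # Reversed so the result reads bottom-up, matching the file-writing step above.
        return visited[::-1]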
--------------------------------------------------------------------------------
/iib/workers/tasks/celery.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import os
3 |
4 | import celery
5 | from celery.signals import celeryd_init
6 | from opentelemetry.instrumentation.celery import CeleryInstrumentor
7 | from opentelemetry.instrumentation.requests import RequestsInstrumentor
8 | from celery.signals import worker_process_init
9 |
10 | from iib.workers.config import configure_celery, validate_celery_config
11 | from iib.common.tracing import TracingWrapper
12 |
13 | tracerWrapper = TracingWrapper()
14 |
15 |
16 | app = celery.Celery()
17 | configure_celery(app)
18 | celeryd_init.connect(validate_celery_config)
19 |
20 | if os.getenv('IIB_OTEL_TRACING', '').lower() == 'true':
21 | RequestsInstrumentor().instrument(trace_provider=tracerWrapper.provider)
22 |
23 |
24 | # Initialize tracing for each Celery worker process via the worker_process_init signal
25 | @worker_process_init.connect(weak=False)
26 | def init_celery_tracing(*args, **kwargs):
27 | """Initialize the tracing for celery."""
28 | if os.getenv('IIB_OTEL_TRACING', '').lower() == 'true':
29 | CeleryInstrumentor().instrument(trace_provider=tracerWrapper.provider)
30 |
--------------------------------------------------------------------------------
/iib/workers/tasks/fbc_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | # This file contains functions that are common for File-Based Catalog image type
3 | import contextlib
4 | import os
5 | import logging
6 | import shutil
7 | import json
8 | from datetime import datetime
9 | from pathlib import Path
10 | from typing import Tuple, List
11 |
12 | import ruamel.yaml
13 |
14 | from iib.exceptions import IIBError
15 | from iib.workers.config import get_worker_config
16 | from iib.common.tracing import instrument_tracing
17 |
18 | log = logging.getLogger(__name__)
19 | yaml = ruamel.yaml.YAML()
20 |
21 |
22 | def is_image_fbc(image: str) -> bool:
23 | """
24 | Detect File-Based catalog image.
25 |
26 |     There are two types of index image, SQLite and FBC,
27 |     which can be distinguished by their labels.
28 |     An image with a File-Based Catalog defines the label
29 |     "operators.operatorframework.io.index.configs.v1"
30 |
31 | :param str image: the pull specification of the container image (usually from_image)
32 | :return: True if image is FBC type, False otherwise (SQLite)
33 | :rtype: bool
34 | """
35 | from iib.workers.tasks.utils import get_image_label
36 |
37 | return bool(get_image_label(image, 'operators.operatorframework.io.index.configs.v1'))
38 |
39 |
40 | @instrument_tracing(span_name='iib.workers.tasks.fbc_utils.get_catalog_dir')
41 | def get_catalog_dir(from_index: str, base_dir: str) -> str:
42 | """
43 | Get file-based catalog directory from the specified index image and save it locally.
44 |
45 | :param str from_index: index image to get file-based catalog directory from.
 46 |     :param str base_dir: base directory to which the catalog directory should be saved.
47 | :return: path to the copied file-based catalog directory.
48 | :rtype: str
49 | :raises IIBError: if any podman command fails.
50 | """
51 | from iib.workers.tasks.build import _copy_files_from_image
52 | from iib.workers.tasks.utils import get_image_label
53 |
54 | log.info("Store file-based catalog directory from %s", from_index)
55 | fbc_dir_path = get_image_label(from_index, 'operators.operatorframework.io.index.configs.v1')
56 | if not fbc_dir_path:
57 | error_msg = f'Index image {from_index} does not contain file-based catalog.'
58 | log.error(error_msg)
59 | raise IIBError(error_msg)
60 |
61 | _copy_files_from_image(from_index, fbc_dir_path, base_dir)
62 | return os.path.join(base_dir, os.path.basename(fbc_dir_path))
63 |
64 |
65 | def get_hidden_index_database(from_index: str, base_dir: str) -> str:
66 | """
67 | Get hidden database file from the specified index image and save it locally.
68 |
69 | :param str from_index: index image to get database file from.
70 | :param str base_dir: base directory to which the database file should be saved.
71 | :return: path to the copied database file.
72 | :rtype: str
73 | """
74 | from iib.workers.tasks.build import _copy_files_from_image
75 |
76 | log.info("Store hidden index.db from %s", from_index)
77 | conf = get_worker_config()
78 | base_db_file = os.path.join(base_dir, conf['temp_index_db_path'])
79 | os.makedirs(os.path.dirname(base_db_file), exist_ok=True)
80 | _copy_files_from_image(from_index, conf['hidden_index_db_path'], base_db_file)
81 | return base_db_file
82 |
83 |
84 | def merge_catalogs_dirs(src_config: str, dest_config: str):
85 | """
 86 |     Merge two catalog directories by copying everything from src_config over dest_config.
87 |
88 | :param str src_config: source config directory
89 | :param str dest_config: destination config directory
90 | """
91 | from iib.workers.tasks.opm_operations import opm_validate
92 |
93 | for conf_dir in (src_config, dest_config):
94 | if not os.path.isdir(conf_dir):
95 | msg = f"config directory does not exist: {conf_dir}"
96 | log.error(msg)
97 | raise IIBError(msg)
98 |
99 | log.info("Merging config folders: %s to %s", src_config, dest_config)
100 | shutil.copytree(src_config, dest_config, dirs_exist_ok=True)
101 |     enforce_json_config_dir(dest_config)
102 |     opm_validate(dest_config)
103 |
104 |
105 | def extract_fbc_fragment(temp_dir: str, fbc_fragment: str) -> Tuple[str, List[str]]:
106 | """
107 | Extract operator packages from the fbc_fragment image.
108 |
109 | :param str temp_dir: base temp directory for IIB request.
110 | :param str fbc_fragment: pull specification of fbc_fragment in the IIB request.
111 | :return: fbc_fragment path, fbc_operator_packages.
112 | :rtype: tuple
113 | """
114 | from iib.workers.tasks.build import _copy_files_from_image
115 |
116 | log.info("Extracting the fbc_fragment's catalog from %s", fbc_fragment)
117 | # store the fbc_fragment at /tmp/iib-**/fbc-fragment
118 | conf = get_worker_config()
119 | fbc_fragment_path = os.path.join(temp_dir, conf['temp_fbc_fragment_path'])
120 | # Copy fbc_fragment's catalog to /tmp/iib-**/fbc-fragment
121 | _copy_files_from_image(fbc_fragment, conf['fbc_fragment_catalog_path'], fbc_fragment_path)
122 |
123 | log.info("fbc_fragment extracted at %s", fbc_fragment_path)
124 | operator_packages = os.listdir(fbc_fragment_path)
125 | log.info("fbc_fragment contains packages %s", operator_packages)
126 | if not operator_packages:
127 | raise IIBError(f"No operator packages in fbc_fragment {fbc_fragment}")
128 |
129 | return fbc_fragment_path, operator_packages
130 |
131 |
132 | def _serialize_datetime(obj: datetime) -> str:
133 | """
134 | Serialize datetime objects.
135 |
136 | :param obj: datetime object to serialize
137 | :return: JSON serializable object as string.
138 | :rtype: str
139 | """
140 | if isinstance(obj, datetime):
141 | return obj.isoformat()
142 | raise TypeError(f"Type {type(obj)} is not serializable.")
143 |
144 |
145 | def enforce_json_config_dir(config_dir: str) -> None:
146 | """
147 | Ensure the files from config dir are in JSON format.
148 |
149 | It will walk recursively and convert any YAML files to the JSON format.
150 |
151 | :param str config_dir: The config dir to walk recursively converting any YAML to JSON.
152 | """
153 | log.info("Enforcing JSON content on config_dir: %s", config_dir)
154 | for dirpath, _, filenames in os.walk(config_dir):
155 | for file in filenames:
156 | in_file = os.path.join(dirpath, file)
157 | if in_file.lower().endswith(".yaml"):
158 | out_file = os.path.join(dirpath, f"{Path(in_file).stem}.json")
159 | log.debug(f"Converting {in_file} to {out_file}.")
160 | # Make sure the output file doesn't exist before opening in append mode
161 | with contextlib.suppress(FileNotFoundError):
162 | os.remove(out_file)
163 | # The input file may contain multiple YAML documents; append them all to the output
164 | with open(in_file, 'r') as yaml_in, open(out_file, 'a') as json_out:
165 | data = yaml.load_all(yaml_in)
166 | for chunk in data:
167 | json.dump(chunk, json_out, default=_serialize_datetime)
168 | os.remove(in_file)
169 |
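Below, a minimal self-contained sketch of the YAML-to-JSON conversion that `enforce_json_config_dir` performs, using ruamel.yaml directly rather than the module's configured `yaml` instance; the sample catalog content and file names are invented for illustration:

```python
import json
import os
import tempfile

from ruamel.yaml import YAML

yaml = YAML(typ='safe')

with tempfile.TemporaryDirectory() as config_dir:
    in_file = os.path.join(config_dir, 'catalog.yaml')
    with open(in_file, 'w') as f:
        # A multi-document YAML stream, like an FBC catalog file.
        f.write('schema: olm.package\nname: my-operator\n---\nschema: olm.channel\nname: stable\n')

    out_file = os.path.join(config_dir, 'catalog.json')
    # Append each YAML document as a JSON object, then drop the YAML file,
    # mirroring the append-mode loop in enforce_json_config_dir.
    with open(in_file, 'r') as yaml_in, open(out_file, 'a') as json_out:
        for chunk in yaml.load_all(yaml_in):
            json.dump(chunk, json_out)
    os.remove(in_file)

    with open(out_file) as f:
        print(f.read())
    # {"schema": "olm.package", "name": "my-operator"}{"schema": "olm.channel", "name": "stable"}
```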
--------------------------------------------------------------------------------
/iib/workers/tasks/general.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import logging
3 | from typing import Any
4 |
5 | import celery.app.task
6 |
7 | from iib.exceptions import IIBError
8 | from iib.workers.api_utils import set_request_state
9 | from iib.workers.tasks.celery import app
10 | from iib.workers.tasks.utils import request_logger
11 | from iib.workers.tasks.build import _cleanup
12 |
13 | __all__ = ['failed_request_callback', 'set_request_state']
14 |
15 | log = logging.getLogger(__name__)
16 |
17 |
18 | @app.task
19 | @request_logger
20 | def failed_request_callback(
21 | context: celery.app.task.Context,
22 | exc: Exception,
23 | traceback: Any,
24 | request_id: int,
25 | ) -> None:
26 | """
27 | Wrap set_request_state for task error callbacks.
28 |
29 | :param celery.app.task.Context context: the context of the task failure
30 | :param Exception exc: the exception that caused the task failure
31 | :param Any traceback: the traceback from the task failure
32 | :param int request_id: the ID of the IIB request
33 | """
34 | if isinstance(exc, IIBError):
35 | msg = str(exc)
36 | else:
37 | msg = 'An unknown error occurred. See logs for details'
38 | log.error(msg, exc_info=exc)
39 |
40 | _cleanup()
41 | set_request_state(request_id, 'failed', msg)
42 |
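For context, a sketch of how this callback is commonly attached to a build task; the task name and arguments below are illustrative, not taken from this file. Celery supplies the failure context, exception, and traceback when it invokes the errback, and `request_id` is bound up front with `.s()`:

```python
# Hedged sketch: attaching failed_request_callback as a Celery error callback.
from iib.workers.tasks.general import failed_request_callback

request_id = 3
# Celery will effectively call:
#   failed_request_callback(context, exc, traceback, request_id)
error_callback = failed_request_callback.s(request_id)

# some_build_task.apply_async(  # illustrative task, not defined here
#     args=[...],
#     link_error=error_callback,
# )
```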
--------------------------------------------------------------------------------
/iib/workers/tasks/iib_static_types.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from typing import Dict, List, Optional, Set, Union
3 | from typing_extensions import NotRequired, TypedDict
4 |
5 | from operator_manifest.operator import ImageName, OperatorCSV
6 |
7 | # Note: Once IIB is used only with Python 3.11 or newer,
8 | # we can remove the classes ending with ...Base and mark those keys as NotRequired[...]
9 |
10 |
11 | class BundleMetadata(TypedDict):
12 | """Type class referencing data of bundle used for mypy checking."""
13 |
14 | found_pullspecs: Set[ImageName]
15 | operator_csvs: List[OperatorCSV]
16 |
17 |
18 | class IndexImageInfo(TypedDict):
19 | """Type class referencing data related to index image used for mypy checking."""
20 |
21 | arches: Set[str]
22 | ocp_version: str
23 | resolved_distribution_scope: str
24 | resolved_from_index: Optional[str]
25 |
26 |
27 | class AllIndexImagesInfo(TypedDict):
28 | """Type class referencing group of IndexImageInfo classes used for mypy checking."""
29 |
30 | from_index: IndexImageInfo
31 | source_from_index: IndexImageInfo
32 | target_index: IndexImageInfo
33 |
34 |
35 | class PrebuildInfo(TypedDict):
36 | """Type class referencing data related to preparation of request for building the image."""
37 |
38 | arches: Set[str]
39 | binary_image: str
40 | binary_image_resolved: str
41 | bundle_mapping: NotRequired[Dict[str, List[str]]]
42 | bundle_replacements: NotRequired[Dict[str, str]]
43 | distribution_scope: str
44 | extra: NotRequired[str]
45 | from_index_resolved: NotRequired[str]
46 | labels: NotRequired[Optional[Dict[str, str]]]
47 | ocp_version: NotRequired[str]
48 | source_from_index_resolved: NotRequired[str]
49 | source_ocp_version: NotRequired[str]
50 | target_index_resolved: NotRequired[str]
51 | target_ocp_version: NotRequired[str]
52 | fbc_fragment_resolved: NotRequired[str]
53 |
54 |
55 | class BundleImage(TypedDict):
56 | """Base type class referencing data related to bundle image used for mypy checking."""
57 |
58 | bundlePath: str
59 | csvName: NotRequired[str]
60 | packageName: str
61 | version: str
62 |
63 |
64 | class UpdateRequestPayload(TypedDict, total=False):
65 | """Type class referencing possible parameters used with IIB API."""
66 |
67 | arches: NotRequired[List[str]]
68 | binary_image: NotRequired[str]
69 | binary_image_resolved: NotRequired[str]
70 | bundle_image: NotRequired[str]
71 | bundle_mapping: NotRequired[Dict[str, List[str]]]
72 | bundle_replacements: NotRequired[Dict[str, str]]
73 | distribution_scope: NotRequired[str]
74 | from_bundle_image_resolved: NotRequired[str]
75 | from_index_resolved: NotRequired[str]
76 | fbc_fragment: NotRequired[str]
77 | fbc_fragment_resolved: NotRequired[str]
78 | index_image: NotRequired[str]
79 | index_image_resolved: NotRequired[str]
80 | internal_index_image_copy: NotRequired[str]
81 | internal_index_image_copy_resolved: NotRequired[str]
82 | omps_operator_version: NotRequired[str]
83 | parent_bundle_image_resolved: NotRequired[str]
84 | source_from_index_resolved: NotRequired[str]
85 | state: NotRequired[str]
86 | state_reason: NotRequired[str]
87 | target_index_resolved: NotRequired[str]
88 |
89 |
90 | class GreenwaveConfig(TypedDict):
91 | """Type class referencing configuration of Greenwawe app."""
92 |
93 | decision_context: str
94 | product_version: str
95 | subject_type: str
96 |
97 |
98 | class IIBOrganizationCustomizations(TypedDict):
99 | """TypedDict class for typing the DevelopmentConfig class."""
100 |
101 | type: str
102 |
103 |
104 | class CSVAnnotations(IIBOrganizationCustomizations):
105 | """TypedDict class for typing the DevelopmentConfig class."""
106 |
107 | annotations: Dict[str, str]
108 |
109 |
110 | class PackageNameSuffix(IIBOrganizationCustomizations):
111 | """TypedDict class for typing the DevelopmentConfig class."""
112 |
113 | suffix: str
114 |
115 |
116 | class ImageNameFromLabels(IIBOrganizationCustomizations):
117 | """TypedDict class for typing the DevelopmentConfig class."""
118 |
119 | template: str
120 |
121 |
122 | class RegistryReplacements(IIBOrganizationCustomizations):
123 | """TypedDict class for typing the DevelopmentConfig class."""
124 |
125 | replacements: Dict[str, str]
126 |
127 |
128 | class EncloseRepo(IIBOrganizationCustomizations):
129 | """TypedDict class for typing the DevelopmentConfig class."""
130 |
131 | enclosure_glue: str
132 | namespace: str
133 |
134 |
135 | iib_organization_customizations_type = Dict[
136 | str,
137 | List[
138 | Union[
139 | CSVAnnotations,
140 | EncloseRepo,
141 | IIBOrganizationCustomizations,
142 | ImageNameFromLabels,
143 | PackageNameSuffix,
144 | RegistryReplacements,
145 | ]
146 | ],
147 | ]
148 |
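A brief illustration of how these TypedDicts are consumed; the payload below is invented for the example. Keys marked `NotRequired` can be omitted entirely, and mypy rejects unknown keys or mistyped values at check time:

```python
from iib.workers.tasks.iib_static_types import UpdateRequestPayload

payload: UpdateRequestPayload = {
    'state': 'in_progress',
    'state_reason': 'Resolving the container images',
}
# NotRequired keys can be filled in later, or never.
payload['arches'] = ['amd64', 's390x']

# mypy error: TypedDict "UpdateRequestPayload" has no key "unknown_key"
# payload['unknown_key'] = 'oops'
```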
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 100
3 | skip-string-normalization = true
4 | target-version = ['py38', 'py39']
5 |
--------------------------------------------------------------------------------
/requirements-test.in:
--------------------------------------------------------------------------------
1 | # Always use the already pinned versions of the runtime dependencies to
2 | # ensure the test environment is as similar as possible.
3 | -r requirements.txt
4 | coverage
5 | pytest
6 | pytest-cov
7 | setuptools
8 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from setuptools import setup, find_packages
3 |
4 | setup(
5 | name='iib',
6 | version='9.2.4',
7 | long_description=__doc__,
8 | packages=find_packages(exclude=['tests', 'tests.*']),
9 | include_package_data=True,
10 | zip_safe=False,
11 | install_requires=[
12 | 'boto3',
13 | 'celery',
14 | 'dogpile.cache',
15 | 'flask',
16 | 'flask-login',
17 | 'flask-migrate',
18 | 'flask-sqlalchemy',
19 | 'importlib-resources',
20 | 'operator-manifest==0.0.5',
21 | 'psycopg2-binary',
22 | 'python-memcached',
23 | 'python-qpid-proton==0.38.0',
24 | 'requests',
25 | 'requests-kerberos',
26 | 'ruamel.yaml',
27 | 'ruamel.yaml.clib',
28 | 'tenacity',
29 | 'typing-extensions',
30 | 'packaging',
31 | 'opentelemetry-api',
32 | 'opentelemetry-sdk',
33 | 'opentelemetry-exporter-otlp',
34 | 'opentelemetry-instrumentation-flask',
35 | 'opentelemetry-instrumentation',
36 | 'opentelemetry-instrumentation-wsgi',
37 | 'opentelemetry-instrumentation-sqlalchemy',
38 | 'opentelemetry-instrumentation-celery',
39 | 'opentelemetry-instrumentation-requests',
40 | 'opentelemetry-instrumentation-logging',
41 | 'opentelemetry-instrumentation-botocore',
42 | ],
43 | classifiers=[
44 | 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
45 | 'Programming Language :: Python :: 3.12',
46 | 'Programming Language :: Python :: 3.13',
47 | ],
48 | entry_points={'console_scripts': ['iib=iib.web.manage:cli']},
49 | license="GPLv3+",
50 | python_requires='>=3.12',
51 | )
52 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/tests/test_web/test_models.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from datetime import timedelta
3 | from unittest import mock
4 |
5 | import pytest
6 |
7 | from iib.exceptions import ValidationError
8 | from iib.web import models
9 |
10 |
11 | def test_request_add_architecture(db, minimal_request):
12 | minimal_request.add_architecture('amd64')
13 | minimal_request.add_architecture('s390x')
14 | db.session.commit()
15 | assert len(minimal_request.architectures) == 2
16 | assert minimal_request.architectures[0].name == 'amd64'
17 | assert minimal_request.architectures[1].name == 's390x'
18 |
19 | # Verify that the method is idempotent
20 | minimal_request.add_architecture('amd64')
21 | db.session.commit()
22 | assert len(minimal_request.architectures) == 2
23 |
24 |
25 | def test_request_add_tag(db, minimal_request):
26 | binary_image = models.Image(pull_specification='quay.io/add/binary-image:latest2')
27 | db.session.add(binary_image)
28 | batch = models.Batch()
29 | db.session.add(batch)
30 | request = models.RequestAdd(batch=batch, binary_image=binary_image)
31 | db.session.add(request)
32 | db.session.commit()
33 | minimal_request.add_build_tag('build-tag1')
34 |
35 | minimal_request.add_build_tag('build-tag1')
36 | minimal_request.add_build_tag('build-tag1')
37 | minimal_request.add_build_tag('build-tag2')
38 | db.session.commit()
39 | assert len(minimal_request.build_tags) == 2
40 | assert minimal_request.build_tags[0].name == 'build-tag1'
41 | assert minimal_request.build_tags[1].name == 'build-tag2'
42 |
43 |
44 | def test_request_add_state(db, minimal_request):
45 | minimal_request.add_state('in_progress', 'Starting things up')
46 | minimal_request.add_state('complete', 'All done!')
47 | db.session.commit()
48 |
49 | assert len(minimal_request.states) == 2
50 | assert minimal_request.state.state_name == 'complete'
51 | assert minimal_request.state.state_reason == 'All done!'
52 | assert minimal_request.states[0].state_name == 'in_progress'
53 | # Ensure that minimal_request.state is the latest state
54 | assert minimal_request.state == minimal_request.states[1]
55 |
56 |
57 | def test_request_add_state_invalid_state(db, minimal_request):
58 | with pytest.raises(ValidationError, match='The state "invalid" is invalid'):
59 | minimal_request.add_state('invalid', 'Starting things up')
60 |
61 |
62 | @pytest.mark.parametrize('state', ('complete', 'failed'))
63 | def test_request_add_state_already_done(state, db, minimal_request):
64 | with pytest.raises(ValidationError, match=f'A {state} request cannot change states'):
65 | minimal_request.add_state(state, 'Done')
66 | db.session.commit()
67 | minimal_request.add_state('in_progress', 'Oops!')
68 |
69 |
70 | def test_request_temporary_data_expiration(app, db, minimal_request):
71 | minimal_request.add_state('in_progress', 'Starting things up')
72 | db.session.commit()
73 | app.config['IIB_REQUEST_DATA_DAYS_TO_LIVE'] = 99
74 | updated = minimal_request.state.updated
75 | assert minimal_request.temporary_data_expiration == (updated + timedelta(days=99))
76 |
77 |
78 | def test_get_state_names():
79 | assert models.RequestStateMapping.get_names() == ['complete', 'failed', 'in_progress']
80 |
81 |
82 | def test_get_type_names():
83 | assert models.RequestTypeMapping.get_names() == [
84 | 'add',
85 | 'add_deprecations',
86 | 'create_empty_index',
87 | 'fbc_operations',
88 | 'generic',
89 | 'merge_index_image',
90 | 'recursive_related_bundles',
91 | 'regenerate_bundle',
92 | 'rm',
93 | ]
94 |
95 |
96 | @pytest.mark.parametrize(
97 | 'type_num, is_valid',
98 | [
99 | (0, True),
100 | (1, True),
101 | (2, True),
102 | (3, True),
103 | (4, True),
104 | (5, True),
105 | (6, True),
106 | (7, True),
107 | (90, False),
108 | ('1', False),
109 | (None, False),
110 | ],
111 | )
112 | def test_request_type_validation(type_num, is_valid):
113 | if is_valid:
114 | models.Request(type=type_num)
115 | else:
116 | with pytest.raises(ValidationError, match=f'{type_num} is not a valid request type number'):
117 | models.Request(type=type_num)
118 |
119 |
120 | def test_batch_user(db, minimal_request_add, minimal_request_rm):
121 | minimal_request_add.user = models.User(username='han_solo@SW.COM')
122 | minimal_request_rm.user = models.User(username='yoda@SW.COM')
123 | db.session.commit()
124 |
125 | assert minimal_request_add.batch.user.username == 'han_solo@SW.COM'
126 | assert minimal_request_rm.batch.user.username == 'yoda@SW.COM'
127 |
128 |
129 | @pytest.mark.parametrize('last_request_state', ('in_progress', 'failed', 'complete'))
130 | def test_batch_state(last_request_state, db):
131 | binary_image = models.Image(pull_specification='quay.io/add/binary-image:latest')
132 | db.session.add(binary_image)
133 | batch = models.Batch()
134 | db.session.add(batch)
135 | for i in range(3):
136 | request = models.RequestAdd(batch=batch, binary_image=binary_image)
137 | request.add_state('complete', 'Some reason')
138 | db.session.add(request)
139 |
140 | request = models.RequestAdd(batch=batch, binary_image=binary_image)
141 | request.add_state(last_request_state, 'Some reason')
142 | db.session.add(request)
143 | db.session.commit()
144 |
145 | assert request.batch.state == last_request_state
146 |
147 |
148 | def test_batch_request_states(db):
149 | binary_image = models.Image(pull_specification='quay.io/add/binary-image:latest')
150 | db.session.add(binary_image)
151 | batch = models.Batch()
152 | db.session.add(batch)
153 | for state in ('in_progress', 'failed', 'complete'):
154 | request = models.RequestAdd(batch=batch, binary_image=binary_image)
155 | request.add_state(state, 'Some state')
156 | db.session.add(request)
157 |
158 | db.session.commit()
159 |
160 | assert request.batch.request_states == ['in_progress', 'failed', 'complete']
161 |
162 |
163 | @pytest.mark.parametrize(
164 | 'registry_auths, msg_error',
165 | (
166 | ([{'registry.redhat.io': {'auth': 'YOLO'}}], '"registry_auths" must be a dict'),
167 | (
168 | {
169 | 'auths': {'registry.redhat.io': {'auth': 'YOLO'}},
170 | 'foo': {'registry.redhat.stage.io': {'auth': 'YOLO2'}},
171 | },
172 | '"registry_auths" must contain single key "auths"',
173 | ),
174 | ({'auths': {}}, '"registry_auths.auths" must be a non-empty dict'),
175 | (
176 | {'auths': {'registry': {'authS': 'YOLO'}}},
177 | 'registry in registry_auths has auth value in incorrect format. '
178 | 'See the API docs for details on the expected format',
179 | ),
180 | (
181 | {'auths': {'registry': ['auth', 'YOLO']}},
182 | 'registry in registry_auths has auth value in incorrect format. '
183 | 'See the API docs for details on the expected format',
184 | ),
185 | (
186 | {'auths': {'registry': {'auth': 'YOLO', 'foo': 'YOLO2'}}},
187 | 'registry in registry_auths has auth value in incorrect format. '
188 | 'See the API docs for details on the expected format',
189 | ),
190 | ),
191 | )
192 | def test_validate_registry_auths(registry_auths, msg_error):
193 | with pytest.raises(ValidationError, match=msg_error):
194 | models.validate_registry_auths(registry_auths)
195 |
196 |
197 | @mock.patch('iib.web.models.url_for')
198 | def test_request_logs_and_related_bundles_in_response(
199 | mock_url_for, app, db, minimal_request_regenerate_bundle
200 | ):
201 | mock_url_for.return_value = 'some-url-for-data'
202 | minimal_request_regenerate_bundle.add_state('in_progress', 'Starting things up')
203 | db.session.commit()
204 | app.config['IIB_AWS_S3_BUCKET_NAME'] = 'some_bucket'
205 | app.config['IIB_REQUEST_LOGS_DIR'] = None
206 | app.config['IIB_REQUEST_RELATED_BUNDLES_DIR'] = None
207 |
208 | rv = minimal_request_regenerate_bundle.to_json(verbose=True)
209 | assert rv['logs']['url'] == 'some-url-for-data'
210 | assert rv['related_bundles']['url'] == 'some-url-for-data'
211 | assert rv['bundle_replacements'] == {}
212 |
--------------------------------------------------------------------------------
/tests/test_web/test_s3_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from unittest import mock
3 |
4 | import botocore
5 | from botocore.response import StreamingBody
6 |
7 | from iib.web import s3_utils
8 |
9 |
10 | @mock.patch('iib.web.s3_utils.boto3')
11 | def test_get_object_from_s3_bucket(mock_boto3):
12 | mock_client = mock.Mock()
13 | mock_boto3.client.return_value = mock_client
14 | mock_body = StreamingBody('lots of data', 0)
15 | mock_client.get_object.return_value = {'Body': mock_body}
16 |
17 | response = s3_utils.get_object_from_s3_bucket('prefix', 'file', 's3-bucket')
18 |
19 | assert response == mock_body
20 | mock_boto3.client.assert_called_once_with('s3')
21 | mock_client.get_object.assert_called_once_with(Bucket='s3-bucket', Key='prefix/file')
22 |
23 |
24 | @mock.patch('iib.web.s3_utils.boto3')
25 | def test_get_object_from_s3_bucket_failure(mock_boto3):
26 | mock_client = mock.Mock()
27 | mock_boto3.client.return_value = mock_client
28 | error_msg = {
29 | 'Error': {'Code': 'SomeServiceException', 'Message': 'Something went horribly wrong'}
30 | }
31 | mock_client.get_object.side_effect = botocore.exceptions.ClientError(error_msg, 'get_object')
32 |
33 | response = s3_utils.get_object_from_s3_bucket('prefix', 'file', 's3-bucket')
34 | assert response is None
35 |
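For reference, a minimal sketch of the function these tests exercise, reconstructed from the assertions above; the real implementation in `iib/web/s3_utils.py` may differ in details such as logging:

```python
from typing import Optional

import boto3
from botocore.exceptions import ClientError
from botocore.response import StreamingBody


def get_object_from_s3_bucket(prefix: str, filename: str, bucket: str) -> Optional[StreamingBody]:
    """Sketch: fetch an object from S3, returning None on any client error."""
    client = boto3.client('s3')
    try:
        response = client.get_object(Bucket=bucket, Key=f'{prefix}/{filename}')
    except ClientError:
        # The web layer treats a missing or errored object as "no content".
        return None
    return response['Body']
```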
--------------------------------------------------------------------------------
/tests/test_workers/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/tests/test_workers/test_api_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from unittest import mock
3 |
4 | import requests
5 | import pytest
6 |
7 | from iib.exceptions import IIBError
8 | from iib.workers import api_utils
9 | from iib.workers.config import get_worker_config
10 |
11 | config = get_worker_config()
12 |
13 |
14 | @mock.patch('iib.workers.api_utils.requests_session')
15 | def test_get_request(mock_session):
16 | mock_session.get.return_value.ok = True
17 | mock_session.get.return_value.json.return_value = '{"id": 3}'
18 |
19 | api_utils.get_request(3)
20 |
21 | mock_session.get.assert_called_once_with('http://iib-api:8080/api/v1/builds/3', timeout=120)
22 |
23 |
24 | @mock.patch('iib.workers.api_utils.requests_session')
25 | def test_get_request_connection_failed(mock_session):
26 | mock_session.get.side_effect = requests.ConnectionError()
27 |
28 | with pytest.raises(IIBError, match='The connection failed.+'):
29 | api_utils.get_request(3)
30 |
31 |
32 | @mock.patch('iib.workers.api_utils.requests_session')
33 | def test_get_request_not_ok(mock_session):
34 | mock_session.get.return_value.ok = False
35 |
36 | with pytest.raises(IIBError, match='The worker failed to get the request 3'):
37 | api_utils.get_request(3)
38 |
39 |
40 | @mock.patch('iib.workers.api_utils.update_request')
41 | def test_set_request_state(mock_update_request):
42 | state = 'failed'
43 | state_reason = 'Ran out of gas'
44 | api_utils.set_request_state(3, state, state_reason)
45 |
46 | mock_update_request.assert_called_once()
47 | assert mock_update_request.call_args[0][1] == {'state': state, 'state_reason': state_reason}
48 |
49 |
50 | @mock.patch('iib.workers.api_utils.requests_auth_session')
51 | def test_set_omps_operator_version(mock_session):
52 | omps_operator_version = {'operator': '1.0.0'}
53 | api_utils.set_omps_operator_version(3, omps_operator_version)
54 |
55 | mock_session.patch.assert_called_once_with(
56 | 'http://iib-api:8080/api/v1/builds/3',
57 | json={'omps_operator_version': '{"operator": "1.0.0"}'},
58 | timeout=120,
59 | )
60 |
61 |
62 | @mock.patch('iib.workers.api_utils.requests_auth_session')
63 | def test_update_request(mock_session):
64 | mock_session.patch.return_value.ok = True
65 | mock_session.patch.return_value.json.return_value = '{"id": 3}'
66 |
67 | api_utils.update_request(3, {'index_image': 'index-image:latest'})
68 |
69 | mock_session.patch.assert_called_once_with(
70 | 'http://iib-api:8080/api/v1/builds/3',
71 | json={'index_image': 'index-image:latest'},
72 | timeout=120,
73 | )
74 |
75 |
76 | @mock.patch('iib.workers.api_utils.requests_auth_session')
77 | def test_update_request_connection_failed(mock_session):
78 | mock_session.patch.side_effect = requests.ConnectionError()
79 |
80 | with pytest.raises(IIBError, match='The connection failed.+'):
81 | api_utils.update_request(3, {'index_image': 'index-image:latest'})
82 | assert mock_session.patch.call_count == config.iib_total_attempts
83 |
84 |
85 | @pytest.mark.parametrize(
86 | 'exc_msg, expected',
87 | (
88 | (None, 'The worker failed to update the request 3'),
89 | (
90 | 'Failed to set index_image={index_image} on request {request_id}',
91 | 'Failed to set index_image=index-image:latest on request 3',
92 | ),
93 | ),
94 | )
95 | @mock.patch('iib.workers.api_utils.requests_auth_session')
96 | def test_update_request_not_ok(mock_session, exc_msg, expected):
97 | mock_session.patch.return_value.ok = False
98 |
99 | with pytest.raises(IIBError, match=expected):
100 | api_utils.update_request(3, {'index_image': 'index-image:latest'}, exc_msg=exc_msg)
101 |
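The connection-failure test above asserts that the worker retries up to `config.iib_total_attempts`. A hedged sketch of that retry pattern with tenacity (which IIB pins in setup.py); the decorator parameters and function below are illustrative, not IIB's actual implementation:

```python
import requests
from tenacity import retry, retry_if_exception_type, stop_after_attempt


@retry(
    retry=retry_if_exception_type(requests.ConnectionError),
    stop=stop_after_attempt(5),  # illustrative; IIB reads the total from config
    reraise=True,
)
def patch_request(url: str, payload: dict) -> requests.Response:
    # Each ConnectionError triggers another attempt; once attempts are
    # exhausted, the last exception is re-raised to the caller.
    return requests.patch(url, json=payload, timeout=120)
```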
--------------------------------------------------------------------------------
/tests/test_workers/test_dogpile_cache.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import pytest
3 |
4 | from iib.workers.dogpile_cache import generate_cache_key
5 |
6 |
7 | @pytest.mark.parametrize(
8 | "args,kwargs",
9 | [
10 | (['a', 'r', 'g', 's'], {"k": "kwargs"}),
11 | (
12 | [
13 | "Lorem ipsum dolor sit amet, consectetuer adipiscing elit. ",
14 | "Aenean commodo ligula eget dolor. Aenean massa. Cum sociis ",
15 | "natoque penatibus et magnis dis parturient montes, nascetur ",
16 | "ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu",
17 | "pretium quis, sem. Nulla consequat massa quis enim. Donec.",
18 | ],
19 | {"k": "kwargs"},
20 | ),
21 | (
22 | ['a', 'r', 'g', 's'],
23 | {
24 | "long": """Lorem ipsum dolor sit amet, consectetuer adipiscing elit.
25 | Aenean commodo ligula eget dolor. Aenean massa. Cum sociis
26 | natoque penatibus et magnis dis parturient montes, nascetur""",
27 | "kwargs": """ridiculus mus. Donec quam felis, ultricies nec, pellentesque eu,
28 | pretium quis, sem. Nulla consequat massa quis enim. Donec.""",
29 | },
30 | ),
31 | ],
32 | )
33 | def test_generate_cache_key(args, kwargs):
34 | passwd = generate_cache_key('function_name', *args, **kwargs)
35 | assert len(passwd) <= 250
36 |
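The 250-character bound matters because memcached rejects keys longer than 250 bytes. A hedged sketch of one way such a key can be derived; the real `generate_cache_key` in `iib/workers/dogpile_cache.py` may differ:

```python
import hashlib


def generate_cache_key_sketch(fn_name: str, *args, **kwargs) -> str:
    """Sketch: build a memcached-safe cache key from a call signature."""
    raw = ':'.join([fn_name, *map(str, args), *(f'{k}={v}' for k, v in kwargs.items())])
    if len(raw) <= 250:
        return raw
    # Hash arbitrarily long argument lists down to a short, stable key.
    return f'{fn_name[:180]}:{hashlib.sha256(raw.encode()).hexdigest()}'
```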
--------------------------------------------------------------------------------
/tests/test_workers/test_greenwave.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import json
3 | from unittest import mock
4 |
5 | import pytest
6 |
7 | from iib.exceptions import IIBError
8 | from iib.workers import greenwave
9 |
10 |
11 | @mock.patch('iib.workers.greenwave._get_koji_build_nvr')
12 | @mock.patch('iib.workers.greenwave.requests.post')
13 | def test_gate_bundles_success(mock_requests, mock_gkbn):
14 | mock_gkbn.return_value = 'n-v-r'
15 | mock_requests.return_value.ok = True
16 | mock_requests.return_value.json.return_value = {"policies_satisfied": True}
17 |
18 | greenwave_config = {
19 | 'subject_type': 'koji_build',
20 | 'decision_context': 'iib_cvp_redhat_operator',
21 | 'product_version': 'cvp',
22 | }
23 | greenwave.gate_bundles(['some-bundle'], greenwave_config)
24 | mock_gkbn.assert_called_once_with('some-bundle')
25 | mock_requests.assert_called_once()
26 |
27 |
28 | @pytest.mark.parametrize(
29 | 'greenwave_request_success, greenwave_json_rv, error_msg',
30 | (
31 | (
32 | False,
33 | {'message': 'Koji build unavailable'},
34 | 'Gating check failed for some-bundle: Koji build unavailable',
35 | ),
36 | (
37 | True,
38 | {'Random Greenwave error': 'Response Changed'},
39 | 'Key "policies_satisfied" missing in Greenwave response for some-bundle',
40 | ),
41 | (
42 | True,
43 | {
44 | 'policies_satisfied': False,
45 | 'unsatisfied_requirements': [
46 | {
47 | 'result_id': 123,
48 | 'subject_identifier': 'some-bundle-container-1.5.0-4',
49 | 'subject_type': 'koji_build',
50 | 'testcase': 'test-case-operator-metadata-fetch',
51 | 'type': 'test-result-passed',
52 | },
53 | {
54 | 'result_id': 1234,
55 | 'subject_identifier': 'some-bundle-container-1.5.0-4',
56 | 'subject_type': 'koji_build',
57 | 'testcase': 'test-case-operator-metadata-preparation',
58 | 'type': 'test-result-passed',
59 | },
60 | ],
61 | },
62 | (
63 | 'Unsatisfied Greenwave policy for some-bundle '
64 | 'with decision_context: iib_cvp_redhat_operator, '
65 | 'product_version: cvp, subject_type: koji_build '
66 | 'and test cases: test-case-operator-metadata-fetch, '
67 | 'test-case-operator-metadata-preparation'
68 | ),
69 | ),
70 | ),
71 | )
72 | @mock.patch('iib.workers.greenwave._get_koji_build_nvr')
73 | @mock.patch('iib.workers.greenwave.requests.post')
74 | def test_gate_bundles_failure(
75 | mock_requests, mock_gkbn, greenwave_request_success, greenwave_json_rv, error_msg
76 | ):
77 | mock_gkbn.return_value = 'n-v-r'
78 | mock_requests.return_value.ok = greenwave_request_success
79 | mock_requests.return_value.json.return_value = greenwave_json_rv
80 |
81 | greenwave_config = {
82 | 'subject_type': 'koji_build',
83 | 'decision_context': 'iib_cvp_redhat_operator',
84 | 'product_version': 'cvp',
85 | }
86 | with pytest.raises(IIBError, match=error_msg):
87 | greenwave.gate_bundles(['some-bundle'], greenwave_config)
88 | mock_gkbn.assert_called_once_with('some-bundle')
89 | mock_requests.assert_called_once()
90 |
91 |
92 | @mock.patch('iib.workers.greenwave._get_koji_build_nvr')
93 | @mock.patch('iib.workers.greenwave.requests.post')
94 | def test_gate_bundles_invalid_json(mock_requests, mock_gkbn):
95 | mock_gkbn.return_value = 'n-v-r'
96 | mock_requests.return_value.ok = True
97 | mock_requests.return_value.json.side_effect = json.JSONDecodeError("error", "\n\n", 1)
98 |
99 | greenwave_config = {
100 | 'subject_type': 'koji_build',
101 | 'decision_context': 'iib_cvp_redhat_operator',
102 | 'product_version': 'cvp',
103 | }
104 | error_msg = 'Key "policies_satisfied" missing in Greenwave response for some-bundle'
105 | with pytest.raises(IIBError, match=error_msg):
106 | greenwave.gate_bundles(['some-bundle'], greenwave_config)
107 | mock_gkbn.assert_called_once_with('some-bundle')
108 | mock_requests.assert_called_once()
109 |
110 |
111 | @mock.patch('iib.workers.greenwave.get_image_labels')
112 | def test_get_koji_build_nvr(mock_gil):
113 | mock_gil.return_value = {'com.redhat.component': 'name', 'version': 1, 'release': '32'}
114 | assert greenwave._get_koji_build_nvr('some-image:latest') == 'name-1-32'
115 |
116 |
117 | def test_verify_greenwave_config_failure():
118 | error_msg = 'IIB is not configured to handle gating of bundles'
119 | greenwave_config = {'subject_type': 'koji_build'}
120 | with pytest.raises(IIBError, match=error_msg):
121 | greenwave._validate_greenwave_params_and_config({}, greenwave_config)
122 |
--------------------------------------------------------------------------------
/tests/test_workers/test_s3_utils.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | import re
3 | from unittest import mock
4 |
5 | from botocore.exceptions import ClientError
6 | import pytest
7 |
8 | from iib.exceptions import IIBError
9 | from iib.workers import s3_utils
10 |
11 |
12 | @mock.patch('iib.workers.s3_utils.boto3')
13 | def test_upload_file_to_s3_bucket(mock_boto3):
14 | my_mock = mock.MagicMock()
15 | mock_boto3.resource.return_value = my_mock
16 | my_mock.meta.client.upload_file.return_value = None
17 |
18 | s3_utils.upload_file_to_s3_bucket('file', 'prefix', 'file')
19 |
20 | mock_boto3.resource.assert_called_once_with(service_name='s3')
21 | my_mock.meta.client.upload_file.assert_called_once_with(
22 | Bucket=None, Filename='file', Key='prefix/file'
23 | )
24 |
25 |
26 | @mock.patch('iib.workers.s3_utils.boto3')
27 | def test_upload_file_to_s3_bucket_failure(mock_boto3):
28 | my_mock = mock.MagicMock()
29 | mock_boto3.resource.return_value = my_mock
30 | err_msg = {'Error': {'Code': 400, 'Message': 'Something went horribly wrong'}}
31 | my_mock.meta.client.upload_file.side_effect = ClientError(err_msg, 'upload')
32 |
33 | error = re.escape(
34 | 'Unable to upload file file to bucket None: An error occurred (400)'
35 | ' when calling the upload operation: Something went horribly wrong'
36 | )
37 | with pytest.raises(IIBError, match=error):
38 | s3_utils.upload_file_to_s3_bucket('file', 'prefix', 'file')
39 |
--------------------------------------------------------------------------------
/tests/test_workers/test_tasks/__init__.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 |
--------------------------------------------------------------------------------
/tests/test_workers/test_tasks/test_build_fbc_operations.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 |
4 | from iib.workers.tasks import build_fbc_operations
5 | from iib.workers.tasks.utils import RequestConfigFBCOperation
6 |
7 |
8 | @mock.patch('iib.workers.tasks.build_fbc_operations._update_index_image_pull_spec')
9 | @mock.patch('iib.workers.tasks.build_fbc_operations._create_and_push_manifest_list')
10 | @mock.patch('iib.workers.tasks.build_fbc_operations._push_image')
11 | @mock.patch('iib.workers.tasks.build_fbc_operations._build_image')
12 | @mock.patch('iib.workers.tasks.build_fbc_operations._add_label_to_index')
13 | @mock.patch('iib.workers.tasks.build_fbc_operations.opm_registry_add_fbc_fragment')
14 | @mock.patch('iib.workers.tasks.build_fbc_operations._update_index_image_build_state')
15 | @mock.patch('iib.workers.tasks.build_fbc_operations.prepare_request_for_build')
16 | @mock.patch('iib.workers.tasks.utils.get_resolved_image')
17 | @mock.patch('iib.workers.tasks.build_fbc_operations.get_resolved_image')
18 | @mock.patch('iib.workers.tasks.build_fbc_operations.set_request_state')
19 | @mock.patch('iib.workers.tasks.build_fbc_operations._cleanup')
20 | @mock.patch('iib.workers.tasks.opm_operations.Opm.set_opm_version')
21 | def test_handle_fbc_operation_request(
22 | mock_sov,
23 | mock_cleanup,
24 | mock_srs,
25 | mock_gri,
26 | mock_ugri,
27 | mock_prfb,
28 | mock_uiibs,
29 | mock_oraff,
30 | mock_alti,
31 | mock_bi,
32 | mock_pi,
33 | mock_cpml,
34 | mock_uiips,
35 | ):
36 | request_id = 10
37 | from_index = 'from-index:latest'
38 | binary_image = 'binary-image:latest'
39 | binary_image_config = {'prod': {'v4.5': 'some_image'}}
40 | fbc_fragment = 'fbc-fragment:latest'
41 | arches = {'amd64', 's390x'}
42 | from_index_resolved = 'from-index@sha256:bcdefg'
43 |
44 | mock_prfb.return_value = {
45 | 'arches': arches,
46 | 'binary_image': binary_image,
47 | 'binary_image_resolved': 'binary-image@sha256:abcdef',
48 | 'from_index_resolved': from_index_resolved,
49 | 'ocp_version': 'v4.6',
50 | 'distribution_scope': "prod",
51 | }
52 | mock_gri.return_value = 'fbc-fragment@sha256:qwerty'
53 |
54 | build_fbc_operations.handle_fbc_operation_request(
55 | request_id=request_id,
56 | fbc_fragment=fbc_fragment,
57 | from_index=from_index,
58 | binary_image=binary_image,
59 | binary_image_config=binary_image_config,
60 | )
61 | mock_prfb.assert_called_once_with(
62 | request_id,
63 | RequestConfigFBCOperation(
64 | _binary_image=binary_image,
65 | from_index=from_index,
66 | overwrite_from_index_token=None,
67 | add_arches=None,
68 | binary_image_config=binary_image_config,
69 | distribution_scope='prod',
70 | fbc_fragment='fbc-fragment@sha256:qwerty',
71 | ),
72 | )
73 | mock_sov.assert_called_once_with(from_index_resolved)
74 | mock_oraff.assert_called_once()
75 | mock_cpml.assert_called_once_with(request_id, {'s390x', 'amd64'}, None)
76 | assert mock_srs.call_count == 3
77 | assert mock_alti.call_count == 2
78 | assert mock_bi.call_count == 2
79 | assert mock_pi.call_count == 2
80 | assert mock_srs.call_args[0][1] == 'complete'
81 |
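A reminder of the mock-ordering convention used throughout these heavily stacked tests: `@mock.patch` decorators apply bottom-up, so the patch closest to the function supplies the first positional parameter (here `mock_sov` for `set_opm_version`). A generic illustration with stdlib targets:

```python
from unittest import mock


@mock.patch('os.path.exists')  # outermost patch -> second parameter
@mock.patch('os.path.isdir')   # innermost patch -> first parameter
def check(mock_isdir, mock_exists):
    # Any dotted path can be patched; the parameter order always mirrors
    # the decorators read from the bottom up.
    return mock_isdir, mock_exists
```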
--------------------------------------------------------------------------------
/tests/test_workers/test_tasks/test_build_recursive_related_bundles.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from unittest import mock
3 |
4 | from operator_manifest.operator import ImageName
5 | import pytest
6 |
7 | from iib.exceptions import IIBError
8 | from iib.workers.tasks import build_recursive_related_bundles
9 |
10 |
11 | # Re-use the module's yaml instance so its configuration also applies in these tests
12 | yaml = build_recursive_related_bundles.yaml
13 |
14 |
15 | @pytest.mark.parametrize('organization', ('acme', None))
16 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles._cleanup')
17 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_resolved_image')
18 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.podman_pull')
19 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.tempfile.TemporaryDirectory')
20 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles._copy_files_from_image')
21 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles._adjust_operator_bundle')
22 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.set_request_state')
23 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_worker_config')
24 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.update_request')
25 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.OperatorManifest.from_directory')
26 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_bundle_metadata')
27 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_related_bundle_images')
28 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.write_related_bundles_file')
29 | def test_handle_recursive_related_bundles_request(
30 | mock_wrbf,
31 | mock_grbi,
32 | mock_gbm,
33 | mock_omfd,
34 | mock_ur,
35 | mock_gwc,
36 | mock_srs,
37 | mock_aob,
38 | mock_cffi,
39 | mock_temp_dir,
40 | mock_pp,
41 | mock_gri,
42 | mock_cleanup,
43 | organization,
44 | tmpdir,
45 | ):
46 | parent_bundle_image = 'bundle-image:latest'
47 | parent_bundle_image_resolved = 'bundle-image@sha256:abcdef'
48 | org = organization
49 | request_id = 99
50 |
51 | mock_temp_dir.return_value.__enter__.return_value = str(tmpdir)
52 | mock_gri.return_value = parent_bundle_image_resolved
53 | mock_gwc.return_value = {
54 | 'iib_max_recursive_related_bundles': 15,
55 | 'iib_request_recursive_related_bundles_dir': 'some-dir',
56 | 'iib_registry': 'quay.io',
57 | }
58 | mock_omfd.return_value = 'operator-manifest'
59 | mock_gbm.side_effect = [
60 | {'found_pullspecs': [ImageName.parse('pullspec-1'), ImageName.parse('pullspec-2')]},
61 | {'found_pullspecs': []},
62 | {'found_pullspecs': []},
63 | ]
64 | mock_grbi.side_effect = [['pullspec-1', 'pullspec-2'], [], []]
65 |
66 | build_recursive_related_bundles.handle_recursive_related_bundles_request(
67 | parent_bundle_image, org, request_id
68 | )
69 | assert mock_cleanup.call_count == 2
70 | assert mock_gbm.call_count == 3
71 | assert mock_grbi.call_count == 3
72 | assert mock_ur.call_count == 3
73 | if org:
74 | assert mock_aob.call_count == 3
75 | else:
76 | mock_aob.assert_not_called()
77 | mock_gri.assert_called_once()
78 | mock_wrbf.assert_called_once_with(
79 | ['pullspec-2', 'pullspec-1', 'bundle-image@sha256:abcdef'],
80 | 99,
81 | 'some-dir',
82 | 'recursive_related_bundles',
83 | )
84 |
85 |
86 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles._cleanup')
87 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_resolved_image')
88 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.podman_pull')
89 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.tempfile.TemporaryDirectory')
90 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles._copy_files_from_image')
91 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles._adjust_operator_bundle')
92 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.set_request_state')
93 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_worker_config')
94 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.update_request')
95 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.OperatorManifest.from_directory')
96 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_bundle_metadata')
97 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.get_related_bundle_images')
98 | @mock.patch('iib.workers.tasks.build_recursive_related_bundles.write_related_bundles_file')
99 | def test_handle_recursive_related_bundles_request_max_bundles_reached(
100 | mock_wrbf,
101 | mock_grbi,
102 | mock_gbm,
103 | mock_omfd,
104 | mock_ur,
105 | mock_gwc,
106 | mock_srs,
107 | mock_aob,
108 | mock_cffi,
109 | mock_temp_dir,
110 | mock_pp,
111 | mock_gri,
112 | mock_cleanup,
113 | tmpdir,
114 | ):
115 | parent_bundle_image = 'bundle-image:latest'
116 | parent_bundle_image_resolved = 'bundle-image@sha256:abcdef'
117 | organization = 'acme'
118 | request_id = 99
119 |
120 | mock_temp_dir.return_value.__enter__.return_value = str(tmpdir)
121 | mock_gri.return_value = parent_bundle_image_resolved
122 | mock_gwc.return_value = {
123 | 'iib_max_recursive_related_bundles': 15,
124 | 'iib_request_recursive_related_bundles_dir': 'some-dir',
125 | 'iib_registry': 'quay.io',
126 | }
127 | mock_omfd.return_value = 'operator-manifest'
128 | mock_gbm.return_value = {
129 | 'found_pullspecs': [
130 | ImageName.parse('child-bundle-1'),
131 | ImageName.parse('child-bundle-2'),
132 | ImageName.parse('child-bundle-3'),
133 | ]
134 | }
135 | mock_grbi.return_value = ['child-bundle-1', 'child-bundle-2', 'child-bundle-3']
136 |
137 | expected = 'Max number of related bundles exceeded. Potential DOS attack!'
138 | with pytest.raises(IIBError, match=expected):
139 | build_recursive_related_bundles.handle_recursive_related_bundles_request(
140 | parent_bundle_image, organization, request_id
141 | )
142 | assert mock_gbm.call_count == 5
143 | assert mock_grbi.call_count == 5
144 | assert mock_ur.call_count == 2
145 | mock_gri.assert_called_once()
146 |
--------------------------------------------------------------------------------
/tests/test_workers/test_tasks/test_general.py:
--------------------------------------------------------------------------------
1 | # SPDX-License-Identifier: GPL-3.0-or-later
2 | from unittest import mock
3 |
4 | import pytest
5 |
6 | from iib.exceptions import IIBError
7 | from iib.workers.tasks import general
8 |
9 |
10 | @pytest.mark.parametrize(
11 | 'exc, expected_msg',
12 | (
13 | (IIBError('Is it lunch time yet?'), 'Is it lunch time yet?'),
14 | (RuntimeError('I cannot run in the rain!'), 'An unknown error occurred'),
15 | ),
16 | )
17 | @mock.patch('iib.workers.tasks.general._cleanup')
18 | @mock.patch('iib.workers.tasks.general.set_request_state')
19 | def test_failed_request_callback(mock_srs, mock_cleanup, exc, expected_msg):
20 | general.failed_request_callback(None, exc, None, 3)
21 | mock_srs.assert_called_once()
22 | assert mock_srs.call_args[0][:2] == (3, 'failed')
23 | assert mock_srs.call_args[0][2].startswith(expected_msg)
24 | mock_cleanup.assert_called_once()
23 |
--------------------------------------------------------------------------------
/tests/test_workers/test_tasks/test_utils_cache.py:
--------------------------------------------------------------------------------
1 | from unittest import mock
2 |
3 | import pytest
4 |
5 | from iib.workers.dogpile_cache import skopeo_inspect_should_use_cache
6 | from iib.workers.tasks import utils
7 |
8 |
9 | @pytest.mark.parametrize(
10 | 'value, result',
11 | [
12 | ('docker://with_digest@sha256:93120347593478509347tdsvzkljbn', True),
13 | ('docker://without_digest:tag', False),
14 | ],
15 | )
16 | def test_should_cache(value, result):
17 | assert skopeo_inspect_should_use_cache(value) is result
18 |
19 |
20 | @mock.patch('dogpile.cache.region.CacheRegion.get')
21 | @mock.patch('iib.workers.tasks.utils.run_cmd')
22 | def test_skopeo_inspect_cache(mock_run_cmd, mock_dpr_get):
23 | mock_run_cmd.return_value = '{"Name": "some-image-cache"}'
24 | image = 'docker://some-image-cache@sha256:129bfb6af3e03997eb_not_real_sha_c7c18d89b40d97'
25 | rv_expected = {'Name': 'some-image-cache'}
26 | mock_dpr_get.return_value = rv_expected
27 |
28 | rv = utils.skopeo_inspect(image)
29 | assert rv == rv_expected
30 | assert mock_run_cmd.called is False
31 |
32 | assert mock_run_cmd.call_args is None
33 |
34 |
35 | @mock.patch('dogpile.cache.region.CacheRegion.get')
36 | @mock.patch('iib.workers.tasks.utils.run_cmd')
37 | def test_skopeo_inspect_no_cache(mock_run_cmd, mock_dpr_get):
38 | mock_run_cmd.return_value = '{"Name": "some-image-cache"}'
39 | image = 'docker://some-image-no-cache:tag'
40 | rv_expected = {"Name": "some-image-cache"}
41 |
42 | rv = utils.skopeo_inspect(image)
43 | assert rv == rv_expected
44 | assert mock_run_cmd.called is True
45 | assert mock_dpr_get.called is False
46 |
47 | skopeo_args = mock_run_cmd.call_args[0][0]
48 | args_expected = ['skopeo', '--command-timeout', '300s', 'inspect', image]
49 | assert skopeo_args == args_expected
50 |
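A hedged sketch of the caching behavior these two tests pin down: results are served from the dogpile region only for digest-pinned pullspecs, since digests are immutable while tags can move. The wiring below is illustrative; the real region configuration lives in `iib/workers/dogpile_cache.py`:

```python
from dogpile.cache import make_region

region = make_region().configure('dogpile.cache.memory')


def skopeo_inspect_sketch(image: str) -> dict:
    def _inspect() -> dict:
        # In IIB this shells out to `skopeo inspect`; stubbed for the sketch.
        return {'Name': image}

    if '@sha256:' in image:
        # Immutable reference: safe to cache.
        return region.get_or_create(image, _inspect)
    # Mutable tag: always re-inspect.
    return _inspect()
```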
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | min_version = 4.0
3 | skip_missing_interpreters = true
4 | envlist = black,docs,flake8,py312,py313,safety,yamllint,bandit,mypy
5 | downloadcache = {toxworkdir}/_download/
6 | labels =
7 | test = unit_tests
8 | static = black, flake8, yamllint, mypy
9 | security = bandit
10 | docs = docs
11 |
12 | [gh-actions]
13 | python =
14 | 3.12: py312, black, flake8, mypy, yamllint, bandit, safety, docs, unit_tests
15 | 3.13: py313, black, flake8, mypy, yamllint, bandit, safety, docs, unit_tests
16 |
17 | [testenv]
18 | usedevelop = true
19 | # 3.12 is the current supported version on RHEL 8.10
20 | basepython =
21 | py312: python3.12
22 | py313: python3.13
23 | migrate-db: python3.12
24 | pip-compile: python3.12
25 | setenv =
26 | IIB_TESTING=true
27 | pytest_command =
28 | pytest -vv \
29 | --capture=sys --cov-config .coveragerc --cov=iib --cov-report term \
30 | --cov-report xml --cov-report html {posargs}
31 |
32 | [testenv:black]
33 | description = black checks [Mandatory]
34 | skip_install = true
35 | deps =
36 | black==22.3.0
37 | commands =
38 | black --check --diff iib tests
39 |
40 | [testenv:docs]
41 | description = build docs [Mandatory]
42 | skip_install = true
43 | deps =
44 | -r docs/requirements.txt
45 | commands =
46 | sphinx-build -W -E docs "{toxworkdir}/docs_out"
47 |
48 | [testenv:flake8]
49 | description = PEP8 checks [Mandatory]
50 | skip_install = true
51 | deps =
52 | flake8==3.7.9
53 | flake8-docstrings==1.5.0
54 | commands =
55 | flake8
56 |
57 | [testenv:yamllint]
58 | description = YAML checks [Mandatory]
59 | skip_install = true
60 | deps =
61 | yamllint==1.24.2
62 | commands =
63 | yamllint .
64 |
65 | [testenv:py312]
66 | description = Python 3.12 unit tests [Mandatory]
67 | commands =
68 | {[testenv]pytest_command}
69 | deps =
70 | -rrequirements-test.txt
71 |
72 |
73 | [testenv:py313]
74 | description = Python 3.13 unit tests
75 | commands =
76 | {[testenv]pytest_command}
77 | deps =
78 | -rrequirements-test.txt
79 |
80 | [testenv:unit_tests]
81 | description = Python unit tests [Mandatory]
82 | commands =
83 | {[testenv]pytest_command}
84 | deps =
85 | -rrequirements-test.txt
86 |
87 | [testenv:safety]
88 | description = dependency vulnerability checks [Mandatory]
89 | skip_install = true
90 | deps =
91 | safety
92 | commands =
93 | safety check -r requirements.txt
94 |
95 | [testenv:bandit]
96 | description = static application security testing [Mandatory]
97 | skip_install = true
98 | deps =
99 | bandit
100 | commands =
101 | bandit -ll -r .
102 |
103 | [testenv:mypy]
104 | description = type check iib
105 | skip_install = true
106 | deps =
107 | mypy
108 | types-retry
109 | types-requests
110 | commands =
111 | mypy --ignore-missing-imports ./iib
112 |
113 | [flake8]
114 | ignore = D100,D104,D105,W503
115 | max-line-length = 100
116 | per-file-ignores =
117 | # Ignore missing docstrings in the tests and migrations
118 | tests/*:D103
119 | iib/web/migrations/*:D103
120 |
121 | [pytest]
122 | log_level = NOTSET
123 |
124 | [testenv:migrate-db]
125 | description = helper function for migrating databases
126 | deps =
127 | -rrequirements-test.txt
128 | setenv =
129 | FLASK_APP=iib/web/app.py
130 | commands =
131 | flask db stamp head
132 | flask db upgrade
133 | flask db migrate -m {posargs}
134 |
135 | [testenv:pip-compile]
136 | description = helper function to regenerate requirements files
137 | skip_install = true
138 | deps = pip-tools
139 | commands =
140 | pip-compile -U --generate-hashes --output-file=requirements.txt {posargs}
141 | pip-compile -U --allow-unsafe --generate-hashes --output-file=requirements-test.txt requirements-test.in {posargs}
142 |
143 |
144 |
--------------------------------------------------------------------------------