├── CODEOWNERS
├── .gitignore
├── mlflow
│   ├── requirements.in
│   ├── rock-ci-metadata.yaml
│   ├── tests
│   │   └── test_rock.py
│   ├── rockcraft.yaml
│   ├── tox.ini
│   └── requirements.txt
├── .github
│   ├── workflows
│   │   ├── on_pull_request.yaml
│   │   ├── scan_images.yaml
│   │   └── on_push.yaml
│   ├── .jira_sync_config.yaml
│   └── ISSUE_TEMPLATE
│       ├── task.yaml
│       └── bug.yaml
├── SECURITY.md
└── README.md

/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @canonical/kubeflow
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | .tox
3 | *.rock
4 | venv
5 | 
--------------------------------------------------------------------------------
/mlflow/requirements.in:
--------------------------------------------------------------------------------
1 | boto3
2 | cryptography
3 | mlflow==2.22.0
4 | prometheus-flask-exporter
5 | pymysql
6 | 
--------------------------------------------------------------------------------
/mlflow/rock-ci-metadata.yaml:
--------------------------------------------------------------------------------
1 | integrations:
2 |   - consumer-repository: https://github.com/canonical/mlflow-operator.git
3 | 
4 | replace-image:
5 |   - file: metadata.yaml
6 |     path: resources.oci-image.upstream-source
7 | 
--------------------------------------------------------------------------------
/.github/workflows/on_pull_request.yaml:
--------------------------------------------------------------------------------
1 | name: On Pull Request
2 | 
3 | on:
4 |   pull_request:
5 | 
6 | jobs:
7 | 
8 |   on-pull-request:
9 |     name: Get rocks modified and build-scan-test them
10 |     uses: canonical/charmed-kubeflow-workflows/.github/workflows/get-rocks-modified-and-build-scan-test-publish.yaml@main
11 |     permissions:
12 |       pull-requests: read
13 |     secrets: inherit
14 |     with:
15 |       microk8s-channel: 1.32-strict/stable
16 |       juju-channel: 3.6/stable
17 |       python-version: "3.8"
18 |       rockcraft-channel: latest/stable
19 | 
--------------------------------------------------------------------------------
/.github/workflows/scan_images.yaml:
--------------------------------------------------------------------------------
1 | name: Scan images
2 | 
3 | on:
4 |   schedule:
5 |     - cron: '00 23 * * *'
6 |   workflow_dispatch:
7 | 
8 | jobs:
9 | 
10 |   scan-images:
11 |     name: Scan published images and report vulnerabilities
12 |     uses: canonical/charmed-kubeflow-workflows/.github/workflows/get-published-images-scan-and-report.yaml@main
13 |     strategy:
14 |       matrix:
15 |         branch: [main, track/2.1]
16 |     secrets:
17 |       GH_TOKEN: ${{ secrets.GH_TOKEN }}
18 |     with:
19 |       severity: "HIGH,CRITICAL"
20 |       branch: ${{ matrix.branch }}
21 | 
--------------------------------------------------------------------------------
/.github/workflows/on_push.yaml:
--------------------------------------------------------------------------------
1 | name: On Push
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |       - track/**
8 | 
9 | jobs:
10 | 
11 |   on-push:
12 |     name: Get rocks modified and build-scan-test-publish them
13 |     uses: canonical/charmed-kubeflow-workflows/.github/workflows/get-rocks-modified-and-build-scan-test-publish.yaml@main
14 |     permissions:
15 |       pull-requests: read
16 |     secrets: inherit
17 |     with:
18 |       microk8s-channel: 1.32-strict/stable
19 |       juju-channel: 3.6/stable
20 |       python-version: "3.8"
21 |       rockcraft-channel: latest/stable
22 | 
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security policy
2 | 
3 | ## Supported Versions
4 | 
5 | The Charmed MLflow project releases with a cadence of ~6 months, supports two minor versions of MLflow, and keeps up to date with the latest upstream version. Whenever a new version of Charmed MLflow is released, the oldest version is dropped from support.
6 | 
7 | ## Reporting a Vulnerability
8 | 
9 | To report a security issue, file a [Private Security Report](https://github.com/canonical/base-mlflow/security/advisories/new) with a description of the issue, the steps you took that led you to the issue, affected versions, and, if known, mitigations for the issue.
10 | The [Ubuntu Security disclosure and embargo policy](https://ubuntu.com/security/disclosure-policy) contains more information about what you can expect when you contact us and what we expect from you.
11 | 
--------------------------------------------------------------------------------
/.github/.jira_sync_config.yaml:
--------------------------------------------------------------------------------
1 | settings:
2 |   # Jira project key to create the issue in
3 |   jira_project_key: "KF"
4 | 
5 |   # Dictionary mapping GitHub issue status to Jira issue status
6 |   status_mapping:
7 |     opened: Untriaged
8 |     closed: done
9 | 
10 |   # (Optional) GitHub labels. Only issues with one of those labels will be synchronized.
11 |   # If not specified, all issues will be synchronized
12 |   labels:
13 |     - bug
14 |     - enhancement
15 | 
16 |   # (Optional) (Default: false) Add a new comment in GitHub with a link to the created Jira issue
17 |   add_gh_comment: true
18 | 
19 |   # (Optional) (Default: true) Synchronize issue description from GitHub to Jira
20 |   sync_description: true
21 | 
22 |   # (Optional) (Default: true) Synchronize comments from GitHub to Jira
23 |   sync_comments: false
24 | 
25 |   # (Optional) (Default: None) Parent Epic key to link the issue to
26 |   epic_key: "KF-4805"
27 | 
28 |   # (Optional) Dictionary mapping GitHub issue labels to Jira issue types.
29 |   # If the label on the issue is not in the specified list, the issue will be created as a Bug
30 |   label_mapping:
31 |     enhancement: Story
32 | 
--------------------------------------------------------------------------------
/mlflow/tests/test_rock.py:
--------------------------------------------------------------------------------
1 | # Copyright 2024 Canonical Ltd.
2 | # See LICENSE file for licensing details.
3 | 
4 | import pytest
5 | import subprocess
6 | 
7 | from charmed_kubeflow_chisme.rock import CheckRock
8 | 
9 | 
10 | @pytest.mark.abort_on_fail
11 | def test_rock():
12 |     """Test rock."""
13 |     check_rock = CheckRock("rockcraft.yaml")
14 |     rock_image = check_rock.get_name()
15 |     rock_version = check_rock.get_version()
16 |     LOCAL_ROCK_IMAGE = f"{rock_image}:{rock_version}"
17 | 
18 |     # assert the mlflow executable is installed (check=True raises on failure)
19 |     result = subprocess.run(
20 |         [
21 |             "docker",
22 |             "run",
23 |             "--entrypoint",
24 |             "/bin/bash",
25 |             LOCAL_ROCK_IMAGE,
26 |             "-c",
27 |             "ls -la /usr/bin/mlflow"
28 |         ],
29 |         stdout=subprocess.PIPE,
30 |         stderr=subprocess.PIPE,
31 |         text=True,
32 |         check=True,
33 |     )
34 | 
35 |     # assert the correct version is installed
36 |     result = subprocess.run(
37 |         [
38 |             "docker",
39 |             "run",
40 |             "--entrypoint",
41 |             "/bin/bash",
42 |             LOCAL_ROCK_IMAGE,
43 |             "-c",
44 |             "/usr/bin/mlflow --version"
45 |         ],
46 |         stdout=subprocess.PIPE,
47 |         stderr=subprocess.PIPE,
48 |         text=True,
49 |         check=True,
50 |     )
51 |     assert rock_version in result.stdout
52 | 
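
The test above only exercises the `mlflow` CLI entrypoint. A further sanity check one might add, following the same `CheckRock`/`subprocess` pattern, is to confirm that the Python package inside the rock reports the same version as `rockcraft.yaml`. This is a sketch only, not part of the repository's test suite; it assumes the image has already been exported to Docker and that `python3` inside the image can import the installed `mlflow` package.

```python
# Hypothetical extra check (not in the repository) -- same pattern as test_rock above.
import subprocess

from charmed_kubeflow_chisme.rock import CheckRock


def test_mlflow_python_package_version():
    """Sketch: the mlflow Python package inside the rock matches the rock version."""
    check_rock = CheckRock("rockcraft.yaml")
    local_rock_image = f"{check_rock.get_name()}:{check_rock.get_version()}"

    result = subprocess.run(
        [
            "docker",
            "run",
            "--entrypoint",
            "/bin/bash",
            local_rock_image,
            "-c",
            "python3 -c 'import mlflow; print(mlflow.__version__)'",
        ],
        capture_output=True,
        text=True,
        check=True,  # raise if the command exits non-zero
    )
    assert check_rock.get_version() in result.stdout
```
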
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/task.yaml:
--------------------------------------------------------------------------------
1 | name: Task
2 | description: File an enhancement proposal
3 | labels: "enhancement"
4 | body:
5 |   - type: markdown
6 |     attributes:
7 |       value: >
8 |         Thanks for taking the time to fill out this enhancement
9 |         proposal! Before submitting your issue, please make sure there
10 |         isn't already a prior issue concerning this. If there is,
11 |         please join that discussion instead.
12 |   - type: textarea
13 |     id: enhancement-proposal-context
14 |     attributes:
15 |       label: Context
16 |       description: >
17 |         Describe why we should work on this task/enhancement, as well as
18 |         existing context we should be aware of
19 |     validations:
20 |       required: true
21 |   - type: textarea
22 |     id: enhancement-proposal-what
23 |     attributes:
24 |       label: What needs to get done
25 |       description: >
26 |         Describe what needs to get done
27 |       placeholder: |
28 |         1. Look into X
29 |         2. Implement Y
30 |         3. Create file Z
31 |     validations:
32 |       required: true
33 |   - type: textarea
34 |     id: enhancement-proposal-dod
35 |     attributes:
36 |       label: Definition of Done
37 |       description: >
38 |         What are the requirements for the task to be considered done
39 |       placeholder: |
40 |         1. We know how X works (spike)
41 |         2. Code is doing Y
42 |         3. Charm has functionality Z
43 |     validations:
44 |       required: true
45 | 
--------------------------------------------------------------------------------
/mlflow/rockcraft.yaml:
--------------------------------------------------------------------------------
1 | name: mlflow
2 | version: "2.22.0"
3 | summary: Base MLflow Image
4 | description: |
5 |   This is the base MLflow rock image. MLflow is a platform to streamline machine learning development,
6 |   including tracking experiments, packaging code into reproducible runs, and sharing and deploying
7 |   models. MLflow offers a set of lightweight APIs that can be used with any existing machine
8 |   learning application or library (TensorFlow, PyTorch, XGBoost, etc), wherever you currently run ML code
9 |   (e.g. in notebooks, standalone applications or the cloud). Read more on the [mlflow website](https://mlflow.org/).
10 | license: Apache-2.0
11 | base: ubuntu@24.04
12 | platforms:
13 |   amd64:
14 | run-user: _daemon_
15 | 
16 | services:
17 |   mlflow-server:
18 |     override: replace
19 |     summary: "mlflow server service"
20 |     command: /usr/bin/python3
21 |     startup: enabled
22 | 
23 | parts:
24 |   security-team-requirement:
25 |     plugin: nil
26 |     override-build: |
27 |       mkdir -p ${CRAFT_PART_INSTALL}/usr/share/rocks
28 |       (echo "# os-release" && cat /etc/os-release && echo "# dpkg-query" && dpkg-query -f '${db:Status-Abbrev},${binary:Package},${Version},${source:Package},${Source:Version}\n' -W) > ${CRAFT_PART_INSTALL}/usr/share/rocks/dpkg.query
29 | 
30 |   mlflow:
31 |     source: .
32 |     plugin: python
33 |     stage-packages:
34 |       # This pulls the default Python interpreter for Ubuntu 24.04 -> 3.12
35 |       - python3-venv
36 |       - tzdata
37 |     python-requirements:
38 |       - requirements.txt
39 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 | # MLflow image
3 | [![On pull request](https://github.com/canonical/base-mlflow/actions/workflows/on_pull_request.yaml/badge.svg)](https://github.com/canonical/base-mlflow/actions/workflows/on_pull_request.yaml)
4 | [![On push](https://github.com/canonical/base-mlflow/actions/workflows/on_push.yaml/badge.svg)](https://github.com/canonical/base-mlflow/actions/workflows/on_push.yaml)
5 | 
6 | This repository contains the source code for Canonical's MLflow rock image.
7 | 
8 | The rock image is currently published [here](https://hub.docker.com/r/charmedkubeflow/mlflow).
9 | 
10 | # MLflow rock OCI image
11 | 
12 | The following tools are required to build and test the rock image:
13 | - `rockcraft` - A tool to create OCI images.
14 | - `yq` - Command-line YAML processor.
15 | - `tox` - A tool to create isolated environments and run commands in them.
16 | 
17 | To install the tools:
18 | ```bash
19 | sudo snap install rockcraft --classic
20 | sudo snap install yq
21 | pip install tox
22 | ```
23 | 
24 | To build the rock image manually:
25 | ```bash
26 | cd mlflow
27 | tox -e pack
28 | ```
29 | 
30 | To use the resulting rock in Docker:
31 | ```bash
32 | tox -e export-to-docker
33 | # Find the proper tag of your image
34 | docker images
35 | ```
36 | 
37 | To test the resulting image after copying it to Docker, run it:
38 | ```bash
39 | # Create a local folder to be mounted to the container
40 | mkdir mlruns
41 | # Change permissions on the folder
42 | chmod 777 mlruns
43 | 
44 | # Run the server with the mounted folder, replacing <tag> with the tag found above
45 | docker run -p 5000:5000 -v ./mlruns:/mlruns --entrypoint=mlflow mlflow:<tag> server --host 0.0.0.0 --backend-store-uri file:///mlruns
46 | ```
47 | 
48 | Then you can visit [http://localhost:5000/](http://localhost:5000/) to access the main MLflow dashboard.
49 | 
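
For a quick end-to-end check of the server started above, a run can be logged with the MLflow Python client and should then appear in the dashboard. The snippet below is illustrative rather than part of this repository; it assumes the `mlflow` client is installed on the host (for example via `pip install mlflow`), and the experiment, run, parameter, and metric names are arbitrary.

```python
# Illustrative client-side check against the locally running server (not part of this repository).
import mlflow

# Point the client at the server exposed on port 5000 above.
mlflow.set_tracking_uri("http://localhost:5000")
mlflow.set_experiment("smoke-test")  # hypothetical experiment name

with mlflow.start_run(run_name="hello-rock"):
    mlflow.log_param("alpha", 0.5)
    mlflow.log_metric("accuracy", 0.9)
```
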
--------------------------------------------------------------------------------
/mlflow/tox.ini:
--------------------------------------------------------------------------------
1 | # Copyright 2024 Canonical Ltd.
2 | # See LICENSE file for licensing details.
3 | [tox]
4 | skipsdist = True
5 | skip_missing_interpreters = True
6 | envlist = pack, export-to-docker, sanity, integration, update-requirements
7 | 
8 | [testenv]
9 | setenv =
10 |     PYTHONPATH={toxinidir}
11 |     PYTHONBREAKPOINT=ipdb.set_trace
12 |     CHARM_REPO=https://github.com/canonical/mlflow-operator.git
13 |     CHARM_BRANCH=main
14 |     LOCAL_CHARM_DIR=charm_repo
15 | 
16 | [testenv:pack]
17 | passenv = *
18 | allowlist_externals =
19 |     rockcraft
20 | commands =
21 |     rockcraft pack
22 | 
23 | [testenv:export-to-docker]
24 | passenv = *
25 | allowlist_externals =
26 |     bash
27 |     rockcraft
28 |     yq
29 | commands =
30 |     # export rock to docker
31 |     bash -c 'NAME=$(yq eval .name rockcraft.yaml) && \
32 |              VERSION=$(yq eval .version rockcraft.yaml) && \
33 |              ARCH=$(yq eval ".platforms | keys | .[0]" rockcraft.yaml) && \
34 |              ROCK="$\{NAME\}_$\{VERSION\}_$\{ARCH\}.rock" && \
35 |              DOCKER_IMAGE=$NAME:$VERSION && \
36 |              echo "Exporting $ROCK to docker as $DOCKER_IMAGE" && \
37 |              rockcraft.skopeo --insecure-policy copy oci-archive:$ROCK docker-daemon:$DOCKER_IMAGE'
38 | 
39 | [testenv:sanity]
40 | passenv = *
41 | deps =
42 |     pytest
43 |     charmed-kubeflow-chisme
44 | commands =
45 |     # run rock tests
46 |     pytest -s -v --tb native --show-capture=all --log-cli-level=INFO {posargs} {toxinidir}/tests
47 | 
48 | [testenv:integration]
49 | passenv = *
50 | allowlist_externals =
51 |     echo
52 | commands =
53 |     # TODO: Implement integration tests here
54 |     echo "WARNING: This is a placeholder test - no test is implemented here."
55 | 
56 | [testenv:update-requirements]
57 | commands =
58 |     pip-compile requirements.in
59 | deps =
60 |     pip-tools
61 | description = Update requirements by executing pip-compile on requirements.in.
62 | 
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug.yaml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: File a bug report
3 | labels: ["bug"]
4 | body:
5 |   - type: markdown
6 |     attributes:
7 |       value: >
8 |         Thanks for taking the time to fill out this bug report! Before submitting your issue, please make
9 |         sure you are using the latest version of the charms. If not, please switch to the newest revision prior to
10 |         posting your report to make sure it's not already solved.
11 |   - type: textarea
12 |     id: bug-description
13 |     attributes:
14 |       label: Bug Description
15 |       description: >
16 |         Describe the bug you encountered. If applicable, add screenshots to
17 |         help explain the problem you are facing.
18 |     validations:
19 |       required: true
20 |   - type: textarea
21 |     id: reproduction
22 |     attributes:
23 |       label: To Reproduce
24 |       description: >
25 |         Please provide step-by-step instructions on how to reproduce the behavior.
26 |       placeholder: |
27 |         1. `juju deploy ...`
28 |         2. `juju relate ...`
29 |         3. `juju status --relations`
30 |     validations:
31 |       required: true
32 |   - type: textarea
33 |     id: environment
34 |     attributes:
35 |       label: Environment
36 |       description: >
37 |         We need to know a bit more about the context in which you run the charm.
38 |         - Are you running Juju locally, on lxd, in multipass or on some other platform?
39 |         - What track and channel you deployed the charm from (e.g. `latest/edge` or similar).
40 |         - Version of any applicable components, like the juju snap, the model controller, lxd, microk8s, and/or multipass.
41 |     validations:
42 |       required: true
43 |   - type: textarea
44 |     id: logs
45 |     attributes:
46 |       label: Relevant Log Output
47 |       description: >
48 |         Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
49 |         Fetch the logs using `juju debug-log --replay` and `kubectl logs ...`. Additional details are available in the juju docs
50 |         at https://juju.is/docs/olm/juju-logs
51 |       render: shell
52 |     validations:
53 |       required: true
54 |   - type: textarea
55 |     id: additional-context
56 |     attributes:
57 |       label: Additional Context
58 | 
--------------------------------------------------------------------------------
/mlflow/requirements.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.12
3 | # by the following command:
4 | #
5 | #    pip-compile requirements.in
6 | #
7 | alembic==1.16.1
8 |     # via mlflow
9 | annotated-types==0.7.0
10 |     # via pydantic
11 | anyio==4.9.0
12 |     # via starlette
13 | blinker==1.9.0
14 |     # via flask
15 | boto3==1.38.27
16 |     # via -r requirements.in
17 | botocore==1.38.27
18 |     # via
19 |     #   boto3
20 |     #   s3transfer
21 | cachetools==5.5.2
22 |     # via
23 |     #   google-auth
24 |     #   mlflow-skinny
25 | certifi==2025.4.26
26 |     # via requests
27 | cffi==1.17.1
28 |     # via cryptography
29 | charset-normalizer==3.4.2
30 |     # via requests
31 | click==8.2.1
32 |     # via
33 |     #   flask
34 |     #   mlflow-skinny
35 |     #   uvicorn
36 | cloudpickle==3.1.1
37 |     # via mlflow-skinny
38 | contourpy==1.3.2
39 |     # via matplotlib
40 | cryptography==45.0.3
41 |     # via -r requirements.in
42 | cycler==0.12.1
43 |     # via matplotlib
44 | databricks-sdk==0.55.0
45 |     # via mlflow-skinny
46 | deprecated==1.2.18
47 |     # via
48 |     #   opentelemetry-api
49 |     #   opentelemetry-semantic-conventions
50 | docker==7.1.0
51 |     # via mlflow
52 | fastapi==0.115.12
53 |     # via mlflow-skinny
54 | flask==3.1.1
55 |     # via
56 |     #   mlflow
57 |     #   prometheus-flask-exporter
58 | fonttools==4.58.1
59 |     # via matplotlib
60 | gitdb==4.0.12
61 |     # via gitpython
62 | gitpython==3.1.44
63 |     # via mlflow-skinny
64 | google-auth==2.40.2
65 |     # via databricks-sdk
66 | graphene==3.4.3
67 |     # via mlflow
68 | graphql-core==3.2.6
69 |     # via
70 |     #   graphene
71 |     #   graphql-relay
72 | graphql-relay==3.2.0
73 |     # via graphene
74 | greenlet==3.2.2
75 |     # via sqlalchemy
76 | gunicorn==23.0.0
77 |     # via mlflow
78 | h11==0.16.0
79 |     # via uvicorn
80 | idna==3.10
81 |     # via
82 |     #   anyio
83 |     #   requests
84 | importlib-metadata==8.6.1
85 |     # via
86 |     #   mlflow-skinny
87 |     #   opentelemetry-api
88 | itsdangerous==2.2.0
89 |     # via flask
90 | jinja2==3.1.6
91 |     # via
92 |     #   flask
93 |     #   mlflow
94 | jmespath==1.0.1
95 |     # via
96 |     #   boto3
97 |     #   botocore
98 | joblib==1.5.1
99 |     # via scikit-learn
100 | kiwisolver==1.4.8
101 |     # via matplotlib
102 | mako==1.3.10
103 |     # via alembic
104 | markdown==3.8
105 |     # via mlflow
106 | markupsafe==3.0.2
107 |     # via
108 |     #   flask
109 |     #   jinja2
110 |     #   mako
111 |     #   werkzeug
112 | matplotlib==3.10.3
113 |     # via mlflow
114 | mlflow==2.22.0
115 |     # via -r requirements.in
116 | mlflow-skinny==2.22.0
117 |     # via mlflow
118 | numpy==2.2.6
119 |     # via
120 |     #   contourpy
121 |     #   matplotlib
122 |     #   mlflow
123 |     #   pandas
124 |     #   scikit-learn
125 |     #   scipy
126 | opentelemetry-api==1.33.1
127 |     # via
128 |     #   mlflow-skinny
129 |     #   opentelemetry-sdk
130 |     #   opentelemetry-semantic-conventions
131 | opentelemetry-sdk==1.33.1
132 |     # via mlflow-skinny
133 | opentelemetry-semantic-conventions==0.54b1
134 |     # via opentelemetry-sdk
135 | packaging==24.2
136 |     # via
137 |     #   gunicorn
138 |     #   matplotlib
139 |     #   mlflow-skinny
140 | pandas==2.2.3
141 |     # via mlflow
142 | pillow==11.2.1
143 |     # via matplotlib
144 | prometheus-client==0.22.0
145 |     # via prometheus-flask-exporter
146 | prometheus-flask-exporter==0.23.2
147 |     # via -r requirements.in
148 | protobuf==6.31.1
149 |     # via mlflow-skinny
150 | pyarrow==19.0.1
151 |     # via mlflow
152 | pyasn1==0.6.1
153 |     # via
154 |     #   pyasn1-modules
155 |     #   rsa
156 | pyasn1-modules==0.4.2
157 |     # via google-auth
158 | pycparser==2.22
159 |     # via cffi
160 | pydantic==2.11.5
161 |     # via
162 |     #   fastapi
163 |     #   mlflow-skinny
164 | pydantic-core==2.33.2
165 |     # via pydantic
166 | pymysql==1.1.1
167 |     # via -r requirements.in
168 | pyparsing==3.2.3
169 |     # via matplotlib
170 | python-dateutil==2.9.0.post0
171 |     # via
172 |     #   botocore
173 |     #   graphene
174 |     #   matplotlib
175 |     #   pandas
176 | pytz==2025.2
177 |     # via pandas
178 | pyyaml==6.0.2
179 |     # via mlflow-skinny
180 | requests==2.32.3
181 |     # via
182 |     #   databricks-sdk
183 |     #   docker
184 |     #   mlflow-skinny
185 | rsa==4.9.1
186 |     # via google-auth
187 | s3transfer==0.13.0
188 |     # via boto3
189 | scikit-learn==1.6.1
190 |     # via mlflow
191 | scipy==1.15.3
192 |     # via
193 |     #   mlflow
194 |     #   scikit-learn
195 | six==1.17.0
196 |     # via python-dateutil
197 | smmap==5.0.2
198 |     # via gitdb
199 | sniffio==1.3.1
200 |     # via anyio
201 | sqlalchemy==2.0.41
202 |     # via
203 |     #   alembic
204 |     #   mlflow
205 | sqlparse==0.5.3
206 |     # via mlflow-skinny
207 | starlette==0.46.2
208 |     # via fastapi
209 | threadpoolctl==3.6.0
210 |     # via scikit-learn
211 | typing-extensions==4.13.2
212 |     # via
213 |     #   alembic
214 |     #   anyio
215 |     #   fastapi
216 |     #   graphene
217 |     #   mlflow-skinny
218 |     #   opentelemetry-sdk
219 |     #   pydantic
220 |     #   pydantic-core
221 |     #   sqlalchemy
222 |     #   typing-inspection
223 | typing-inspection==0.4.1
224 |     # via pydantic
225 | tzdata==2025.2
226 |     # via pandas
227 | urllib3==2.4.0
228 |     # via
229 |     #   botocore
230 |     #   docker
231 |     #   requests
232 | uvicorn==0.34.2
233 |     # via mlflow-skinny
234 | werkzeug==3.1.3
235 |     # via flask
236 | wrapt==1.17.2
237 |     # via deprecated
238 | zipp==3.22.0
239 |     # via importlib-metadata
240 | 
--------------------------------------------------------------------------------