├── .dockerignore ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yaml │ └── feature_request.yml ├── PULL_REQUEST_TEMPLATE.md ├── actions │ ├── build │ │ └── action.yaml │ └── kind │ │ └── action.yml ├── build │ ├── Dockerfile │ ├── Pipfile │ ├── Pipfile.lock │ ├── README.md │ └── requirements.txt ├── resources │ ├── argo-lite │ │ └── kustomization.yaml │ ├── crds │ │ ├── buildconfig-crd.yaml │ │ ├── deploymentconfig-crd.yaml │ │ ├── imagestream-crd.yaml │ │ └── servicemonitor-crd.yaml │ ├── datasciencecluster │ │ └── datasciencecluster.yaml │ ├── dspa-lite │ │ └── dspa.yaml │ ├── external-pre-reqs │ │ ├── kustomization.yaml │ │ ├── mariadb-secret.yaml │ │ ├── minio-secret.yaml │ │ └── root-ca-configmap.yaml │ ├── kind │ │ └── kind.yaml │ ├── mariadb │ │ ├── certs-secret.yaml │ │ ├── deployment.yaml │ │ ├── kustomization.yaml │ │ ├── pvc.yaml │ │ ├── secret.yaml │ │ ├── self-signed-ca-configmap.yaml │ │ ├── service.yaml │ │ └── tls-config-configmap.yaml │ ├── minio │ │ ├── cabundle-configmap.yaml │ │ ├── certs-secret.yaml │ │ ├── deployment.yaml │ │ ├── kustomization.yaml │ │ ├── pvc.yaml │ │ ├── secret.yaml │ │ └── service.yaml │ ├── olm │ │ ├── catalogsource.yaml │ │ ├── operatorgroup.yaml │ │ └── subscription.yaml │ └── pypiserver │ │ └── base │ │ ├── kustomization.yaml │ │ ├── nginx-certs.yaml │ │ ├── nginx-configmap.yaml │ │ ├── nginx-service.yaml │ │ ├── nginx-tls-config.yaml │ │ ├── pvc.yaml │ │ ├── pypiserver.yaml │ │ └── service.yaml ├── scripts │ ├── python_package_upload │ │ ├── Dockerfile │ │ ├── package_download.sh │ │ └── package_upload_run.sh │ ├── release_create │ │ ├── create_tag_release.sh │ │ ├── notify.sh │ │ ├── validate_pr.sh │ │ └── vars.sh │ ├── release_prep │ │ ├── create_branches.sh │ │ ├── generate_pr.sh │ │ ├── prereqs.sh │ │ └── templates │ │ │ └── config.yaml │ ├── release_trigger │ │ └── upload-data.sh │ └── tests │ │ ├── README.md │ │ ├── collect_logs.sh │ │ └── tests.sh └── workflows │ ├── build-main.yml │ ├── build-prs-trigger.yaml 
│ ├── build-prs.yml │ ├── build-tags.yml │ ├── functests.yml │ ├── kind-integration.yml │ ├── nightly_tests.yml │ ├── precommit.yml │ ├── release_create.yaml │ ├── release_prep.yaml │ ├── release_trigger.yaml │ ├── unittests.yml │ └── upgrade-test.yml ├── .gitignore ├── .gitleaks.toml ├── .golangci.yaml ├── .pre-commit-config.yaml ├── .yamllint.yaml ├── Dockerfile ├── LICENSE ├── Makefile ├── OWNERS ├── PROJECT ├── README.md ├── api ├── v1 │ ├── dspipeline_types.go │ ├── groupversion_info.go │ └── zz_generated.deepcopy.go └── v1alpha1 │ ├── dspipeline_types.go │ ├── groupversion_info.go │ └── zz_generated.deepcopy.go ├── config ├── argo │ ├── clusterrole.argo-aggregate-to-admin.yaml │ ├── clusterrole.argo-aggregate-to-edit.yaml │ ├── clusterrole.argo-aggregate-to-view.yaml │ ├── clusterrole.argo-cluster-role.yaml │ ├── clusterrolebinding.argo-binding.yaml │ ├── configmap.workflow-controller-configmap.yaml │ ├── crd.applications.yaml │ ├── crd.clusterworkflowtemplates.yaml │ ├── crd.cronworkflows.yaml │ ├── crd.viewers.yaml │ ├── crd.workflowartifactgctasks.yaml │ ├── crd.workfloweventbinding.yaml │ ├── crd.workflows.yaml │ ├── crd.workflowtaskresult.yaml │ ├── crd.workflowtaskset.yaml │ ├── crd.workflowtemplate.yaml │ ├── deployment.workflow-controller.yaml │ ├── kustomization.yaml │ ├── params.yaml │ ├── priorityclass.yaml │ ├── role.argo.yaml │ ├── rolebinding.argo-binding.yaml │ └── serviceaccount.argo.yaml ├── base │ ├── kustomization.yaml │ ├── params.env │ └── params.yaml ├── component_metadata.yaml ├── configmaps │ ├── files │ │ └── config.yaml │ └── kustomization.yaml ├── crd │ ├── bases │ │ ├── datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml │ │ └── scheduledworkflows.yaml │ ├── external │ │ ├── monitoring.coreos.com_servicemonitors.yaml │ │ └── route.openshift.io_routes.yaml │ ├── kustomization.yaml │ └── kustomizeconfig.yaml ├── internal │ ├── apiserver │ │ ├── default │ │ │ ├── deployment.yaml.tmpl │ │ │ ├── 
kfp_launcher_config.yaml.tmpl │ │ │ ├── monitor.yaml.tmpl │ │ │ ├── role_ds-pipeline-user-access.yaml.tmpl │ │ │ ├── role_ds-pipeline.yaml.tmpl │ │ │ ├── role_pipeline-runner.yaml.tmpl │ │ │ ├── rolebinding_ds-pipeline.yaml.tmpl │ │ │ ├── rolebinding_pipeline-runner.yaml.tmpl │ │ │ ├── sa_ds-pipeline.yaml.tmpl │ │ │ ├── sa_pipeline-runner.yaml.tmpl │ │ │ ├── server-config.yaml.tmpl │ │ │ ├── service.ml-pipeline.yaml.tmpl │ │ │ └── service.yaml.tmpl │ │ ├── route │ │ │ └── route.yaml.tmpl │ │ └── sample-pipeline │ │ │ ├── sample-config.yaml.tmpl │ │ │ └── sample-pipeline.yaml.tmpl │ ├── common │ │ ├── default │ │ │ ├── mlmd-envoy-dashboard-access-policy.yaml.tmpl │ │ │ └── policy.yaml.tmpl │ │ └── no-owner │ │ │ └── clusterrolebinding.yaml.tmpl │ ├── devtools │ │ ├── database.secret.yaml.tmpl │ │ └── storage.secret.yaml.tmpl │ ├── mariadb │ │ ├── default │ │ │ ├── deployment.yaml.tmpl │ │ │ ├── mariadb-sa.yaml.tmpl │ │ │ ├── networkpolicy.yaml.tmpl │ │ │ ├── pvc.yaml.tmpl │ │ │ ├── service.yaml.tmpl │ │ │ └── tls-config.yaml.tmpl │ │ └── generated-secret │ │ │ └── secret.yaml.tmpl │ ├── minio │ │ ├── default │ │ │ ├── deployment.yaml.tmpl │ │ │ ├── minio-sa.yaml.tmpl │ │ │ ├── pvc.yaml.tmpl │ │ │ ├── service.minioservice.yaml.tmpl │ │ │ └── service.yaml.tmpl │ │ ├── generated-secret │ │ │ └── secret.yaml.tmpl │ │ └── route.yaml.tmpl │ ├── ml-metadata │ │ ├── grpc-service │ │ │ ├── metadata-grpc.ml-pipeline.service.yaml.tmpl │ │ │ └── metadata-grpc.service.yaml.tmpl │ │ ├── metadata-envoy.configmap.yaml.tmpl │ │ ├── metadata-envoy.deployment.yaml.tmpl │ │ ├── metadata-envoy.service.yaml.tmpl │ │ ├── metadata-envoy.serviceaccount.yaml.tmpl │ │ ├── metadata-grpc-tls-config-secret.yaml.tmpl │ │ ├── metadata-grpc.configmap.yaml.tmpl │ │ ├── metadata-grpc.deployment.yaml.tmpl │ │ ├── metadata-grpc.networkpolicy.yaml.tmpl │ │ ├── metadata-grpc.serviceaccount.yaml.tmpl │ │ └── route │ │ │ └── metadata-envoy.route.yaml.tmpl │ ├── mlpipelines-ui │ │ ├── configmap.yaml.tmpl │ 
│ ├── deployment.yaml.tmpl │ │ ├── role.yaml.tmpl │ │ ├── rolebinding.yaml.tmpl │ │ ├── route.yaml.tmpl │ │ ├── sa-ds-pipeline-ui.yaml.tmpl │ │ ├── sa_ds-pipelines-viewer.yaml.tmpl │ │ └── service.yaml.tmpl │ ├── persistence-agent │ │ ├── deployment.yaml.tmpl │ │ ├── role.yaml.tmpl │ │ ├── rolebinding.yaml.tmpl │ │ └── sa.yaml.tmpl │ ├── scheduled-workflow │ │ ├── deployment.yaml.tmpl │ │ ├── role.yaml.tmpl │ │ ├── rolebinding.yaml.tmpl │ │ └── sa.yaml.tmpl │ └── workflow-controller │ │ ├── configmap.yaml.tmpl │ │ ├── deployment.yaml.tmpl │ │ ├── role.yaml.tmpl │ │ ├── rolebinding.yaml.tmpl │ │ ├── sa.yaml.tmpl │ │ └── service.yaml.tmpl ├── manager │ ├── kustomization.yaml │ ├── manager-service.yaml │ └── manager.yaml ├── manifests │ └── kustomization.yaml ├── overlays │ ├── kind-tests │ │ ├── img_patch.yaml │ │ ├── kustomization.yaml │ │ ├── res_patch.yaml │ │ └── user_patch.yaml │ ├── make-deploy │ │ ├── img_patch.yaml │ │ └── kustomization.yaml │ ├── odh │ │ ├── argo │ │ │ └── kustomization.yaml │ │ ├── dspo │ │ │ └── kustomization.yaml │ │ └── kustomization.yaml │ └── rhoai │ │ ├── argo │ │ └── kustomization.yaml │ │ ├── dspo │ │ └── kustomization.yaml │ │ └── kustomization.yaml ├── prometheus │ ├── kustomization.yaml │ └── monitor.yaml ├── rbac │ ├── aggregate_dspa_role_edit.yaml │ ├── aggregate_dspa_role_view.yaml │ ├── argo_role.yaml │ ├── argo_role_binding.yaml │ ├── kustomization.yaml │ ├── leader_election_role.yaml │ ├── leader_election_role_binding.yaml │ ├── role.yaml │ ├── role_binding.yaml │ └── service_account.yaml └── samples │ ├── custom-configs │ ├── db-creds.yaml │ ├── dspa.yaml │ ├── kustomization.yaml │ ├── storage-creds.yaml │ └── ui-configmap.yaml │ ├── custom-workflow-controller-config │ ├── custom-workflow-controller-configmap.yaml │ ├── dspa.yaml │ └── kustomization.yaml │ ├── dspa-all-fields │ └── dspa_all_fields.yaml │ ├── dspa-simple │ ├── dspa_simple.yaml │ └── kustomization.yaml │ ├── dspa_healthcheck.yaml │ ├── 
external-object-storage │ ├── dspa.yaml │ └── kustomization.yaml │ └── local-dev │ ├── dspa.yaml │ ├── kustomization.yaml │ └── storage-creds.yaml ├── controllers ├── apiserver.go ├── apiserver_test.go ├── common.go ├── common_test.go ├── config │ ├── defaults.go │ ├── manifest.go │ └── templating.go ├── database.go ├── database_test.go ├── dspastatus │ └── dspa_status.go ├── dspipeline_controller.go ├── dspipeline_controller_func_test.go ├── dspipeline_fake_controller.go ├── dspipeline_params.go ├── dspipeline_params_test.go ├── metrics.go ├── mlmd.go ├── mlmd_test.go ├── mlpipeline_ui.go ├── mlpipeline_ui_test.go ├── persistence_agent.go ├── persistence_agent_test.go ├── scheduled_workflow.go ├── scheduled_workflow_test.go ├── storage.go ├── storage_test.go ├── suite_test.go ├── testdata │ ├── README.md │ ├── declarative │ │ ├── case_0 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ │ └── cr.yaml │ │ │ └── expected │ │ │ │ └── created │ │ │ │ ├── apiserver_deployment.yaml │ │ │ │ ├── configmap_server_config.yaml │ │ │ │ ├── mariadb_deployment.yaml │ │ │ │ ├── persistence-agent_deployment.yaml │ │ │ │ └── scheduled-workflow_deployment.yaml │ │ ├── case_1 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ │ └── cr.yaml │ │ │ └── expected │ │ │ │ └── not_created │ │ │ │ ├── apiserver_deployment.yaml │ │ │ │ ├── mariadb_deployment.yaml │ │ │ │ ├── minio_deployment.yaml │ │ │ │ ├── mlpipelines-ui_deployment.yaml │ │ │ │ ├── persistence-agent_deployment.yaml │ │ │ │ └── scheduled-workflow_deployment.yaml │ │ ├── case_2 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ │ └── cr.yaml │ │ │ └── expected │ │ │ │ └── created │ │ │ │ ├── apiserver_deployment.yaml │ │ │ │ ├── mariadb_deployment.yaml │ │ │ │ ├── minio_deployment.yaml │ │ │ │ ├── mlmd_envoy_deployment.yaml │ │ │ │ ├── mlmd_grpc_deployment.yaml │ │ │ │ ├── mlpipelines-ui_deployment.yaml │ │ │ │ ├── persistence-agent_deployment.yaml │ │ │ │ ├── sample-config.yaml.tmpl │ │ │ │ ├── scheduled-workflow_deployment.yaml │ │ │ │ └── 
workflow_deployment.yaml │ │ ├── case_3 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ │ ├── 00_secret.yaml │ │ │ │ ├── 01_secret.yaml │ │ │ │ └── 02_cr.yaml │ │ │ └── expected │ │ │ │ ├── created │ │ │ │ └── apiserver_deployment.yaml │ │ │ │ └── not_created │ │ │ │ ├── database_secret.yaml │ │ │ │ ├── sample-config.yaml.tmpl │ │ │ │ ├── sample-pipeline.yaml.tmpl │ │ │ │ └── storage_secret.yaml │ │ ├── case_4 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ │ └── 00_cr.yaml │ │ │ └── expected │ │ │ │ └── created │ │ │ │ ├── apiserver_deployment.yaml │ │ │ │ ├── mariadb_deployment.yaml │ │ │ │ ├── minio_deployment.yaml │ │ │ │ ├── mlmd_envoy_deployment.yaml │ │ │ │ ├── mlmd_grpc_deployment.yaml │ │ │ │ ├── mlpipelines-ui_deployment.yaml │ │ │ │ ├── persistence-agent_deployment.yaml │ │ │ │ └── scheduled-workflow_deployment.yaml │ │ ├── case_5 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ │ ├── 00_configmap.yaml │ │ │ │ ├── 01_configmap.yaml │ │ │ │ └── 02_cr.yaml │ │ │ └── expected │ │ │ │ └── created │ │ │ │ ├── apiserver_deployment.yaml │ │ │ │ ├── configmap_dspa_trusted_ca.yaml │ │ │ │ └── mariadb_deployment.yaml │ │ └── case_6 │ │ │ ├── config.yaml │ │ │ ├── deploy │ │ │ └── 00_cr.yaml │ │ │ └── expected │ │ │ └── created │ │ │ ├── apiserver_deployment.yaml │ │ │ ├── mlpipelines-ui_deployment.yaml │ │ │ └── persistence-agent_deployment.yaml │ └── tls │ │ ├── ca-bundle.crt │ │ ├── dummy-ca-bundle.crt │ │ └── empty-ca-bundle.crt ├── testutil │ ├── equalities.go │ └── util.go ├── util │ └── util.go ├── workflow_controller.go └── workflow_controller_test.go ├── datasciencecluster ├── README.md └── datasciencecluster.yaml ├── docs ├── example_pipelines │ └── iris │ │ ├── Dockerfile │ │ ├── iris-pipeline.py │ │ └── iris-pipeline.yaml ├── images │ ├── create_run.png │ ├── executed_run.png │ ├── logs.png │ ├── start_run.png │ ├── started_run.png │ ├── upload_flipcoin.png │ └── upload_pipeline.png └── release │ ├── RELEASE.md │ ├── compatibility.md │ └── compatibility.yaml ├── 
go.mod ├── go.sum ├── hack └── boilerplate.go.txt ├── main.go ├── scripts └── release │ ├── README.md │ ├── params.py │ ├── release.py │ ├── template │ └── version_doc.md │ └── version_doc.py └── tests ├── README.md ├── artifacts_test.go ├── dspa_v2_test.go ├── experiments_test.go ├── main.go ├── pipeline_runs_test.go ├── pipeline_test.go ├── resources ├── Dockerfile ├── dspa-external-lite.yaml ├── dspa-external.yaml ├── dspa-lite-tls.yaml ├── dspa-lite.yaml ├── dspa.yaml ├── iris_pipeline_without_cache.py ├── iris_pipeline_without_cache_compiled.yaml ├── test-pipeline-run.yaml ├── test-pipeline-with-custom-pip-server-run.yaml ├── test-pipeline-with-custom-pip-server.py └── test-pipeline.py ├── setup └── datasciencecluster_openshift.yaml ├── suite_test.go ├── upgrades └── main.sh └── util ├── resources.go └── rest.go /.dockerignore: -------------------------------------------------------------------------------- 1 | # More info: https://docs.docker.com/engine/reference/builder/#dockerignore-file 2 | # Ignore build and test binaries. 3 | bin/ 4 | testbin/ 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for this project. 3 | title: "[Feature Request]: " 4 | labels: ["kind/enhancement", "priority/normal"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | Thanks for taking the time to fill out this feature request! Please, fill this form to help us improve the project. 10 | - type: textarea 11 | id: description 12 | attributes: 13 | label: Feature description 14 | description: A clear and concise description of what you want to happen. 
15 | validations: 16 | required: true 17 | - type: textarea 18 | id: describe-alternatives 19 | attributes: 20 | label: Describe alternatives you've considered 21 | description: A clear and concise description of any alternative solutions or features you've considered. 22 | placeholder: Tell us about alternatives you've considered... 23 | validations: 24 | required: false 25 | - type: textarea 26 | attributes: 27 | label: Anything else? 28 | description: | 29 | Links? References? Add any other context or screenshots about the feature request here. 30 | 31 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. 32 | validations: 33 | required: false 34 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## The issue resolved by this Pull Request: 2 | Resolves # 3 | 4 | ## Description of your changes: 5 | 6 | 7 | 8 | ## Testing instructions 9 | 11 | 12 | ## Checklist 13 | - [ ] The commits are squashed in a cohesive manner and have meaningful messages. 14 | - [ ] Testing instructions have been added in the PR body (for PRs involving changes that are not immediately obvious). 
15 | - [ ] The developer has manually tested the changes and verified that the changes work 16 | -------------------------------------------------------------------------------- /.github/build/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | 8 | [requires] 9 | python_version = "3.9.16" 10 | 11 | [packages] 12 | pre-commit = "~=2.17.0" 13 | -------------------------------------------------------------------------------- /.github/build/README.md: -------------------------------------------------------------------------------- 1 | # Pre-Commit Go Toolchain 2 | 3 | The artifacts in this folder are used for running pre-commit CI against Pull Requests in this repo. For the pre-commit 4 | configuration, see the `.github/workflows` directory. 5 | -------------------------------------------------------------------------------- /.github/build/requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # These requirements were autogenerated by pipenv 3 | # To regenerate from the project's Pipfile, run: 4 | # 5 | # pipenv lock --requirements 6 | # 7 | 8 | -i https://pypi.org/simple 9 | cfgv==3.3.1; python_full_version >= '3.6.1' 10 | distlib==0.3.6 11 | filelock==3.9.0; python_version >= '3.7' 12 | identify==2.5.17; python_version >= '3.7' 13 | nodeenv==1.7.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6' 14 | platformdirs==3.0.0; python_version >= '3.7' 15 | pre-commit==2.17.0 16 | pyyaml==6.0; python_version >= '3.6' 17 | toml==0.10.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2' 18 | virtualenv==20.19.0; python_version >= '3.7' 19 | -------------------------------------------------------------------------------- /.github/resources/argo-lite/kustomization.yaml: 
-------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | namespace: opendatahub 4 | resources: 5 | - ../../../config/argo 6 | -------------------------------------------------------------------------------- /.github/resources/crds/buildconfig-crd.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | name: buildconfigs.build.openshift.io 5 | spec: 6 | group: build.openshift.io 7 | versions: 8 | - name: v1 9 | served: true 10 | storage: true 11 | schema: 12 | openAPIV3Schema: 13 | type: object 14 | properties: 15 | spec: 16 | type: object 17 | scope: Namespaced 18 | names: 19 | plural: buildconfigs 20 | singular: buildconfig 21 | kind: BuildConfig 22 | shortNames: 23 | - bc 24 | -------------------------------------------------------------------------------- /.github/resources/crds/deploymentconfig-crd.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | name: deploymentconfigs.apps.openshift.io 5 | spec: 6 | group: apps.openshift.io 7 | versions: 8 | - name: v1 9 | served: true 10 | storage: true 11 | schema: 12 | openAPIV3Schema: 13 | type: object 14 | properties: 15 | spec: 16 | type: object 17 | scope: Namespaced 18 | names: 19 | plural: deploymentconfigs 20 | singular: deploymentconfig 21 | kind: DeploymentConfig 22 | shortNames: 23 | - dc 24 | -------------------------------------------------------------------------------- /.github/resources/crds/imagestream-crd.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | name: imagestreams.image.openshift.io 5 | spec: 6 | group: image.openshift.io 
7 | versions: 8 | - name: v1 9 | served: true 10 | storage: true 11 | schema: 12 | openAPIV3Schema: 13 | type: object 14 | properties: 15 | spec: 16 | type: object 17 | scope: Namespaced 18 | names: 19 | plural: imagestreams 20 | singular: imagestream 21 | kind: ImageStream 22 | shortNames: 23 | - is 24 | -------------------------------------------------------------------------------- /.github/resources/datasciencecluster/datasciencecluster.yaml: -------------------------------------------------------------------------------- 1 | kind: DataScienceCluster 2 | apiVersion: datasciencecluster.opendatahub.io/v1 3 | metadata: 4 | name: data-science-pipelines-operator 5 | spec: 6 | components: 7 | dashboard: 8 | managementState: Managed 9 | datasciencepipelines: 10 | managementState: Managed 11 | -------------------------------------------------------------------------------- /.github/resources/dspa-lite/dspa.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: test-dspa 5 | spec: 6 | dspVersion: v2 7 | apiServer: 8 | enableOauth: false 9 | resources: 10 | limits: 11 | cpu: 20m 12 | memory: 500Mi 13 | requests: 14 | cpu: 20m 15 | memory: 100Mi 16 | scheduledWorkflow: 17 | resources: 18 | limits: 19 | cpu: 20m 20 | memory: 500Mi 21 | requests: 22 | cpu: 20m 23 | memory: 100Mi 24 | persistenceAgent: 25 | resources: 26 | limits: 27 | cpu: 20m 28 | memory: 500Mi 29 | requests: 30 | cpu: 20m 31 | memory: 100Mi 32 | mlmd: 33 | deploy: true 34 | envoy: 35 | resources: 36 | limits: 37 | cpu: 20m 38 | memory: 500Mi 39 | requests: 40 | cpu: 20m 41 | memory: 100Mi 42 | grpc: 43 | resources: 44 | limits: 45 | cpu: 20m 46 | memory: 500Mi 47 | requests: 48 | cpu: 20m 49 | memory: 100Mi 50 | database: 51 | mariaDB: 52 | image: quay.io/sclorg/mariadb-105-c9s:latest 53 | pvcSize: 500Mi 54 | resources: 55 | limits: 56 | cpu: 
60m 57 | memory: 500Mi 58 | requests: 59 | cpu: 60m 60 | memory: 500Mi 61 | objectStorage: 62 | minio: 63 | image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 64 | pvcSize: 500Mi 65 | resources: 66 | limits: 67 | cpu: 20m 68 | memory: 500Mi 69 | requests: 70 | cpu: 20m 71 | memory: 100Mi 72 | -------------------------------------------------------------------------------- /.github/resources/external-pre-reqs/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - minio-secret.yaml 5 | - mariadb-secret.yaml 6 | - root-ca-configmap.yaml 7 | -------------------------------------------------------------------------------- /.github/resources/external-pre-reqs/mariadb-secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: ds-pipeline-db-test 5 | labels: 6 | app: mariadb 7 | component: data-science-pipelines 8 | stringData: 9 | password: password 10 | type: Opaque 11 | -------------------------------------------------------------------------------- /.github/resources/external-pre-reqs/minio-secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: minio 5 | stringData: 6 | accesskey: accesskey 7 | secretkey: secretkey 8 | type: Opaque 9 | -------------------------------------------------------------------------------- /.github/resources/kind/kind.yaml: -------------------------------------------------------------------------------- 1 | # --------------------------------------------------------------------------- 2 | # Copyright 2023. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # --------------------------------------------------------------------------- 16 | 17 | kind: Cluster 18 | apiVersion: kind.x-k8s.io/v1alpha4 19 | nodes: 20 | - role: control-plane 21 | image: kindest/node:v1.25.3@sha256:f52781bc0d7a19fb6c405c2af83abfeb311f130707a0e219175677e366cc45d1 22 | kubeadmConfigPatches: 23 | - | 24 | kind: InitConfiguration 25 | nodeRegistration: 26 | kubeletExtraArgs: 27 | node-labels: "ingress-ready=true" 28 | containerdConfigPatches: 29 | - |- 30 | [plugins."io.containerd.grpc.v1.cri".registry.mirrors."${REGISTRY_ADDRESS}"] 31 | endpoint = ["http://${REGISTRY_ADDRESS}"] 32 | -------------------------------------------------------------------------------- /.github/resources/mariadb/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | namespace: test-mariadb 4 | resources: 5 | - deployment.yaml 6 | - pvc.yaml 7 | - secret.yaml 8 | - service.yaml 9 | - certs-secret.yaml 10 | - self-signed-ca-configmap.yaml 11 | - tls-config-configmap.yaml 12 | -------------------------------------------------------------------------------- /.github/resources/mariadb/pvc.yaml: -------------------------------------------------------------------------------- 1 | kind: PersistentVolumeClaim 2 | apiVersion: v1 3 | metadata: 4 | name: mariadb-test 5 | spec: 6 | accessModes: 7 | - ReadWriteOnce 8 | resources: 9 | requests: 10 | storage: 500Mi 11 | volumeMode: Filesystem 12 | 
-------------------------------------------------------------------------------- /.github/resources/mariadb/secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: ds-pipeline-db-test 5 | labels: 6 | app: mariadb 7 | component: data-science-pipelines 8 | stringData: 9 | password: password 10 | type: Opaque 11 | -------------------------------------------------------------------------------- /.github/resources/mariadb/self-signed-ca-configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: self-signed-ca 5 | namespace: test-mariadb 6 | data: 7 | public.crt: | 8 | -----BEGIN CERTIFICATE----- 9 | MIIFLTCCAxWgAwIBAgIUbQHREiryzhsD5JHSCXocioe5nZQwDQYJKoZIhvcNAQEL 10 | BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0 11 | MDQxNjIxMTgzOFoXDTM0MDQxNDIxMTgzOFowJjELMAkGA1UEBhMCWFgxFzAVBgNV 12 | BAMMDnJoLWRzcC1kZXZzLmlvMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC 13 | AgEA+KcyWD91S2IPiFYhqlvoPV929moRx+FZV1BO4eukYWd56mckKhw7sSlnFNNP 14 | FC/HnyROh5dKeNuP/qJi4VPfXRGqaDr2tGbuFanGnCRy4vtfgjWMT7/NzbZTzw9w 15 | 8KiTqG0E1rxIU/FpLSWHgo+u+uOLZ2MoJd09+QSwZWPqDTeR2Kc8/3H+wSwmJrbk 16 | 9XleOdttbDL1R+RWscNcnLyLX2/BqXmPE+ALHH+hFINmHbpm+D5GAjubUqlSyJjN 17 | rvzrgnxoyPjmWzX6dyYxEz/WioUvc6c3UsDhH7KCoZcxpmNDjvwP2VVIeSAaRSBi 18 | EEI69CXSDUGBKKBU2sTkr7Os6HEMS8zpVtStS7kOXth4wBFhJxVC17DlAUy3lBFe 19 | MoSgw4rpVmiSqH75JlseUjdScuDcs6V+gkSTYXj19N2b4nE9Go229nGGvw2GLevN 20 | VsT2bjZq8QURRnToiA/ATdm3T8HXsYUBvRdNt+h47spb9dgGsGqlrKeKYer3xngD 21 | OkE8VrrvL8FJQD1YKeOpi6qtJAoUOBY/XMaD0buNGH0M0CCfts1nTbRqcNIWUXka 22 | hZT8uY/0bqQIt7ELJYJK1LopFWdco6NeDMchYNu823rCTouRoUZE1+pEaJU/WBd1 23 | chkeRtrppcppaCfOpfg/dvnMrwbGLGBTdRo1Adej2Nv7aFsCAwEAAaNTMFEwHQYD 24 | VR0OBBYEFJdLV+QLhHJtjRHGa3uSR5CY1PhgMB8GA1UdIwQYMBaAFJdLV+QLhHJt 25 | jRHGa3uSR5CY1PhgMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIB 26 | 
ANTD/yGpKOP7OlJLh61cBpP7lq5GNP3z5+BPq0UU8noVmiSkfG8DUBX0nCd1FFX3 27 | 87cS+1knp6pW1SooxGD2xzmGxrlxeoltHlU1WuC1U2GSSlZJnBcfMzk0nWxNT6jk 28 | 4K7sKOuk+0tUQ6v+qPpydZDiC3Pq7bPqqHz0wyw0SWRFBIL/UML+MOBxMMza8kZ6 29 | ucVSNdoaPKOvCcXxFsoD8f/ncOufBeW3JZtL832A60aSw8BEzxhm/t1gZHpiyxea 30 | iNtE0IDy3+yte7J5g210n4tCxN4x3+nDAsiZ7O+ckcJwVRcT37hrQDVJkbih3e6Q 31 | nlyf4OCRGXtOD6KvWhR7jgVdKMlnyBDzs9D8BfdI/99ijU/utJDsLySZsJE1lCuk 32 | xOo44kbPvXs49Cr39DMW1YcS4COzqLamhQqpLiiqOYlqKud8UlCQ3kMFVQLqrTCz 33 | 21z3i7MlIyRAoqPw3n5M9YWW2M7Oo48xRFTchbEnAk9ARlzGBJipwEUDDQZ7gMDp 34 | JAsM2Fbu3Up3eyakUBjKYuWdmshm5QrCWzUiYGMuVgKarwFN4zV393KjtsqtGfBJ 35 | eFWo51LzoK7sH3vq+zCSAGzHkHVEkMmn3mTJtMghmguX/vwRpxUEKmExwvJ6qvMo 36 | Oza6GUl11howSA7rNnesv+brJOFJqAS48B1G7Mrl04od 37 | -----END CERTIFICATE----- 38 | -------------------------------------------------------------------------------- /.github/resources/mariadb/service.yaml: -------------------------------------------------------------------------------- 1 | kind: Service 2 | apiVersion: v1 3 | metadata: 4 | name: mariadb 5 | namespace: test-mariadb 6 | labels: 7 | app: mariadb 8 | component: data-science-pipelines 9 | spec: 10 | ports: 11 | - port: 3306 12 | protocol: TCP 13 | targetPort: 3306 14 | selector: 15 | app: mariadb 16 | component: data-science-pipelines 17 | -------------------------------------------------------------------------------- /.github/resources/mariadb/tls-config-configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: tls-config 5 | namespace: test-mariadb 6 | managedFields: 7 | - manager: kubectl-create 8 | operation: Update 9 | apiVersion: v1 10 | data: 11 | z-custom-my.cnf: | 12 | [mariadb] 13 | ssl_cert = /.mariadb/certs/public.crt 14 | ssl_key = /.mariadb/certs/private.key 15 | ssl_capath = /.mariadb/certs/CAs 16 | require_secure_transport = on 17 | 
-------------------------------------------------------------------------------- /.github/resources/minio/cabundle-configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | annotations: 5 | service.beta.openshift.io/inject-cabundle: 'true' 6 | name: config-service-cabundle 7 | namespace: test-minio 8 | data: 9 | service-ca.crt: | 10 | -----BEGIN CERTIFICATE----- 11 | MIIDUTCCAjmgAwIBAgIIfb5THW7z1V0wDQYJKoZIhvcNAQELBQAwNjE0MDIGA1UE 12 | Awwrb3BlbnNoaWZ0LXNlcnZpY2Utc2VydmluZy1zaWduZXJAMTcwNDgzNTI5MDAe 13 | Fw0yNDAxMDkyMTIxMjlaFw0yNjAzMDkyMTIxMzBaMDYxNDAyBgNVBAMMK29wZW5z 14 | aGlmdC1zZXJ2aWNlLXNlcnZpbmctc2lnbmVyQDE3MDQ4MzUyOTAwggEiMA0GCSqG 15 | SIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9CkH6LdzFqMSKNFAxPfsPJAXJekUJjts4 16 | I85PcL8bQpAFnswZoHyjfBbtw9QdLBFFxAS/3mPH9oTgBFYZY9sQmKXbdcjbQZ0C 17 | LlqLIlK3yDoRg/NLBY0Sv0mbOdDrESaz2T0/HUkalM1e5+zuQluNy8MY+ysBuJjJ 18 | CpmeGEpCNf7SfsP1j9VHpjFkBgMqCmLI0oKUH/Ez9MjwrA7yQnquW6a3QbNlm1T/ 19 | UyulJejBdHuktwiEMuDc9BNumfGNU/OPV1zo5hz8WCpVunoXKEXqHODc29MxY3o9 20 | gBdeA4/dEJqsIbyBdqnphVS0duX7u+po+0GAaoVANMhcW48OGY0NAgMBAAGjYzBh 21 | MA4GA1UdDwEB/wQEAwICpDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTsuxk/ 22 | WaxmQIo6OUsYm1ks6jAUBDAfBgNVHSMEGDAWgBTsuxk/WaxmQIo6OUsYm1ks6jAU 23 | BDANBgkqhkiG9w0BAQsFAAOCAQEAdKkXhYTpJNTrlp0S8UIjLwAHTqWUQOHMoiPm 24 | LYTzkWCA+yIhcwhw8Y+AbV6/hNEkQdkgVI+cexz1rkGm9Jxz1OL00mGunWgUMgJr 25 | umu4qZnyS2ErpeaMqOZMKpeQmiwkwnT1OUdUAI8kJN21tQ+uD6F47QseJwdKTMc/ 26 | YeZ7Zyo9LYJMWGKKlYYM/u7PDPwHZxT8v+15SXqR72eBM1kWNdVB5NeUK2bLxUok 27 | 1RcDfiLDiVy3ctZqm4Tx4WTr/uRxsss0ctLdRvltF+kSgdAQFympeFgTF7MGsoft 28 | OIfz84aEwS656SHPKBaqgPNYrnseSrpxtc30pfs7GUPG+FqFOQ== 29 | -----END CERTIFICATE----- 30 | -------------------------------------------------------------------------------- /.github/resources/minio/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 
2 | kind: Kustomization 3 | namespace: test-minio 4 | resources: 5 | - deployment.yaml 6 | - service.yaml 7 | - pvc.yaml 8 | - secret.yaml 9 | - certs-secret.yaml 10 | - cabundle-configmap.yaml 11 | -------------------------------------------------------------------------------- /.github/resources/minio/pvc.yaml: -------------------------------------------------------------------------------- 1 | kind: PersistentVolumeClaim 2 | apiVersion: v1 3 | metadata: 4 | name: minio 5 | spec: 6 | accessModes: 7 | - ReadWriteOnce 8 | resources: 9 | requests: 10 | storage: 500Mi 11 | volumeMode: Filesystem 12 | -------------------------------------------------------------------------------- /.github/resources/minio/secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: minio 5 | stringData: 6 | accesskey: accesskey 7 | secretkey: secretkey 8 | type: Opaque 9 | -------------------------------------------------------------------------------- /.github/resources/minio/service.yaml: -------------------------------------------------------------------------------- 1 | kind: Service 2 | apiVersion: v1 3 | metadata: 4 | name: minio 5 | spec: 6 | ports: 7 | - name: https 8 | protocol: TCP 9 | port: 9000 10 | targetPort: 9000 11 | - name: console 12 | protocol: TCP 13 | port: 9001 14 | targetPort: 9001 15 | selector: 16 | app: minio 17 | -------------------------------------------------------------------------------- /.github/resources/olm/catalogsource.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: operators.coreos.com/v1alpha1 2 | kind: CatalogSource 3 | metadata: 4 | name: odh-olm-test 5 | namespace: olm 6 | spec: 7 | displayName: '' 8 | grpcPodConfig: 9 | securityContextConfig: restricted 10 | image: "${CATALOG_BASE_IMG}" 11 | publisher: '' 12 | sourceType: grpc 13 | 
-------------------------------------------------------------------------------- /.github/resources/olm/operatorgroup.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: operators.coreos.com/v1 2 | kind: OperatorGroup 3 | metadata: 4 | name: openshift-operators 5 | namespace: openshift-operators 6 | -------------------------------------------------------------------------------- /.github/resources/olm/subscription.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: operators.coreos.com/v1alpha1 2 | kind: Subscription 3 | metadata: 4 | name: opendatahub-operator 5 | namespace: openshift-operators 6 | spec: 7 | channel: rolling 8 | installPlanApproval: Automatic 9 | name: opendatahub-operator 10 | source: odh-olm-test 11 | sourceNamespace: olm 12 | -------------------------------------------------------------------------------- /.github/resources/pypiserver/base/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - pvc.yaml 5 | - nginx-configmap.yaml 6 | - nginx-certs.yaml 7 | - service.yaml 8 | - nginx-service.yaml 9 | - pypiserver.yaml 10 | -------------------------------------------------------------------------------- /.github/resources/pypiserver/base/nginx-configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: nginx-config 5 | data: 6 | nginx.conf: | 7 | user www-data; 8 | worker_processes auto; 9 | error_log /tmp/error.log; 10 | pid /tmp/nginx.pid; 11 | 12 | 13 | events { 14 | worker_connections 1024; 15 | } 16 | 17 | http { 18 | include /etc/nginx/conf.d/*.conf; 19 | 20 | upstream pypi { 21 | server pypi-server.test-pypiserver.svc.cluster.local:8080; 22 | } 23 | 24 | server { 25 | listen 8081 default_server; 26 | 
server_name localhost; 27 | 28 | # Redirect HTTP traffic to HTTPS 29 | return 301 https://$host$request_uri; 30 | } 31 | 32 | server { 33 | listen 8443 ssl; 34 | server_name localhost; 35 | 36 | ssl_certificate /etc/nginx/ssl/server.crt; 37 | ssl_certificate_key /etc/nginx/ssl/private.key; 38 | ssl_protocols TLSv1 TLSv1.1 TLSv1.2; 39 | ssl_ciphers HIGH:!aNULL:!MD5; 40 | 41 | 42 | location / { 43 | proxy_set_header Host $host:$server_port; 44 | proxy_set_header X-Forwarded-Proto $scheme; 45 | proxy_set_header X-Real-IP $remote_addr; 46 | proxy_temp_path /tmp/proxy_temp; 47 | proxy_set_header Connection ""; 48 | proxy_pass http://pypi; 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /.github/resources/pypiserver/base/nginx-service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: nginx-service 5 | spec: 6 | selector: 7 | app: pypi-server 8 | ports: 9 | - name: http 10 | protocol: TCP 11 | port: 80 12 | targetPort: 8081 13 | - name: https 14 | protocol: TCP 15 | port: 443 16 | targetPort: 8443 17 | -------------------------------------------------------------------------------- /.github/resources/pypiserver/base/nginx-tls-config.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: nginx-tls-config 5 | data: 6 | rootCA.crt: | 7 | -----BEGIN CERTIFICATE----- 8 | MIIFbzCCA1egAwIBAgIUFKdc74xyqqXfOwbL1uQhI3Pknl4wDQYJKoZIhvcNAQEL 9 | BQAwRzELMAkGA1UEBhMCWFgxODA2BgNVBAMML25naW54LXNlcnZpY2UudGVzdC1w 10 | eXBpc2VydmVyLnN2Yy5jbHVzdGVyLmxvY2FsMB4XDTI0MDUyMjE5NTEwNloXDTM0 11 | MDUyMDE5NTEwNlowRzELMAkGA1UEBhMCWFgxODA2BgNVBAMML25naW54LXNlcnZp 12 | Y2UudGVzdC1weXBpc2VydmVyLnN2Yy5jbHVzdGVyLmxvY2FsMIICIjANBgkqhkiG 13 | 9w0BAQEFAAOCAg8AMIICCgKCAgEAyr25llsejYGe0tt8VHNYt7cs1uzdMBmASEUq 14 | 
4u0kX2lK34pojQJLsuPQtOQGDJQQLASKM1KQJC5uGN3co4qa8dpxJ0H0mj7eW+qQ 15 | p+iJZxnXh7GFSfG1FqKLcBO2fL8htbA3lAPXV2EK95Hln6X+fSLUCNmECwWdRodv 16 | P5VB3kicwFxVBpUO0NLHvhjpIF3X1e0Bx/6JjP0JsukBW1UJdJ0xM9v92Wk4UrY3 17 | nBFWHX+qtb43YXP+sMrtLyFheZnDoyI76SrsNthnDZa9vq6FfcLj872MuvWITGbY 18 | er6o+CNu1oGi5cub5FC3CMAdmX/p3nmWdvhotHaxYeOCk9+B+16B1+VCu+v8JAhu 19 | C3/gsnOXN+jiQhmJv+JiM1+Ys7Aua1oscQ33zK6jSI/1QIefptmI7LwkTwvsdf1u 20 | SXW2ScfiL1F3C852jgPAZ95qen2zCoE77WO23t5b+6UKdjyhuHxtQIARueB5VtBZ 21 | 2OAYkd/fqBPztsWCoTBu+I1x0+dJgaPNu6NCkeGMAqqTSJ1i+VY2BJMDXVsUIad5 22 | 08Sa6NBZ07dQjtSZJkS9XfSmCbvRa+FxWrwtoMU/MNIag+MkbNJySWoqLlkzuDH/ 23 | Kr2zxro5vHfCRamKeNxNyqXth7cM/97twxbeD/6C5PBljFZpDdZoUqz6EkmYWm7T 24 | 4t6I7NUCAwEAAaNTMFEwHQYDVR0OBBYEFO6X2DNhcLbAcigRtOPwGAjmvUjCMB8G 25 | A1UdIwQYMBaAFO6X2DNhcLbAcigRtOPwGAjmvUjCMA8GA1UdEwEB/wQFMAMBAf8w 26 | DQYJKoZIhvcNAQELBQADggIBAH9KMG0OOQCkN74Cg/v7LGufNC9XoY0qu4zd6q9P 27 | 4k+X8jA/YD8W48RZUNx7kjfsS+qIwh1dtQRENbWJ7bhlBR2M2SvQd2AeVIBTuZi4 28 | 2HWHO8MWurnujyWwBfKbZJI3NnE2QjDq/3VZCjSScJIf5eBxXZHLx4Q8u19cNOMD 29 | C+hgCpfcaYIWEhkwxNt1ExrCYjEttslaMRrDj3CkYN4cojBIm+0L3OoTuRCyvK1l 30 | EAQwM18sz+Yi3+94cBwl4tM1yAtslGwvE4bmi+mCkCKp9CrNKMaVXPcZeCtLmKFr 31 | lea6at75a/DpM/SUhUuWWer5JszRTqpEHe8BTGDt76EE68ftHMOlCwvZxmUnR1Li 32 | CkbAB3lfI9zAU992byik/uhY8a0ARUVMZgBIndZ965l+LxgX1FAIZ6kBvcCWUHj5 33 | erIHFyFZ5oZzVywbztBy9tPJbstUbCMz0WrAzYdaxZProOXUYgHWXrlgHwhjd6ch 34 | C8ygyW3sbEESsWccktrK5mtqTByIPpFHtlrbtzawduWjgG7b7cCx0U4Z5PkQPvy7 35 | xjer0x1IPjI+1dMCrEzJprj3ZXCW2S7QaxKKEW6Fd1IOIGx8M4dWonA9Tllqo2Tm 36 | 5SmlLt8bcvieAqqX+u75Cb6l06PYsiRJvQAGgchvzkkMHc66+bPCDGmM2nCY0ZtU 37 | O09x 38 | -----END CERTIFICATE----- 39 | -------------------------------------------------------------------------------- /.github/resources/pypiserver/base/pvc.yaml: -------------------------------------------------------------------------------- 1 | kind: PersistentVolumeClaim 2 | apiVersion: v1 3 | metadata: 4 | name: pypi-datastore 5 | spec: 6 | accessModes: 7 | - ReadWriteOnce 8 | 
resources: 9 | requests: 10 | storage: 500Mi 11 | volumeMode: Filesystem 12 | -------------------------------------------------------------------------------- /.github/resources/pypiserver/base/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: pypi-server 5 | spec: 6 | selector: 7 | app: pypi-server 8 | ports: 9 | - name: pypi-server 10 | port: 8080 11 | protocol: TCP 12 | targetPort: 8080 13 | -------------------------------------------------------------------------------- /.github/scripts/python_package_upload/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/python:3.9 2 | 3 | # Set working directory 4 | WORKDIR /app 5 | 6 | # Copy the script into the container 7 | COPY package_download.sh /app/package_download.sh 8 | 9 | # Make sure the script is executable 10 | RUN chmod +x /app/package_download.sh 11 | 12 | # Store the files in a folder 13 | VOLUME /app/packages 14 | 15 | ENTRYPOINT ["/app/package_download.sh"] 16 | -------------------------------------------------------------------------------- /.github/scripts/python_package_upload/package_download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | # Download packages 6 | for package in "kfp==2.11.0" "numpy"; do 7 | pip download $package -d packages --only-binary=:none: 8 | done 9 | -------------------------------------------------------------------------------- /.github/scripts/python_package_upload/package_upload_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | CONTAINER_CLI="${CONTAINER_CLI:-docker}" 6 | RUN_PKG_UPLOADER_IN_CONTAINER="${RUN_PKG_UPLOADER_IN_CONTAINER:-true}" 7 | 8 | mkdir -p /tmp/packages 9 | 10 | if [ "$RUN_PKG_UPLOADER_IN_CONTAINER" = "true" ]; then 11 | echo "Running 
uploader in container..." 12 | $CONTAINER_CLI rm package_upload_run || true 13 | $CONTAINER_CLI build -t package_upload . 14 | $CONTAINER_CLI run --name package_upload_run -v /tmp/packages:/app/packages package_upload 15 | else 16 | echo "Running uploader..." 17 | ./package_download.sh 18 | mv packages /tmp 19 | fi 20 | 21 | # Print the pods in the namespace 22 | kubectl -n test-pypiserver get pods 23 | 24 | pod_name=$(kubectl -n test-pypiserver get pod | grep pypi | awk '{print $1}') 25 | 26 | # Copy packages 27 | for entry in /tmp/packages/*; do 28 | kubectl -n test-pypiserver cp "$entry" $pod_name:/opt/app-root/packages 29 | done 30 | -------------------------------------------------------------------------------- /.github/scripts/release_create/create_tag_release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | echo "Create a tag release for ${TARGET_VERSION_TAG} in ${REPOSITORY}" 6 | 7 | RELEASE_REPO_DIR=$(dirname ${WORKING_DIR})/repo_dir 8 | git clone \ 9 | --depth=1 \ 10 | --branch=${RELEASE_BRANCH} \ 11 | https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${REPOSITORY} \ 12 | ${RELEASE_REPO_DIR} 13 | cd ${RELEASE_REPO_DIR} 14 | 15 | gh release create ${TARGET_VERSION_TAG} --target ${RELEASE_BRANCH} --generate-notes --notes-start-tag ${PREVIOUS_VERSION_TAG} 16 | 17 | cat <> /tmp/release-notes.md 18 | 19 | This is a release comprising of multiple repos: 20 | * DSP component for ${TARGET_VERSION_TAG} can be found [here](https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG}) 21 | * DSPO component for ${TARGET_VERSION_TAG} can be found [here](https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG}) 22 | 23 | Version Table for components can be found [here](https://github.com/${GH_ORG}/data-science-pipelines-operator/blob/main/docs/release/compatibility.md) 24 | EOF 25 | 26 | echo "$(gh release view 
${TARGET_VERSION_TAG} --json body --jq .body)" >> /tmp/release-notes.md 27 | 28 | echo "Release notes to be created:" 29 | cat /tmp/release-notes.md 30 | 31 | gh release edit ${TARGET_VERSION_TAG} --notes-file /tmp/release-notes.md 32 | rm /tmp/release-notes.md 33 | -------------------------------------------------------------------------------- /.github/scripts/release_create/notify.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | cat <> /tmp/body-file.txt 6 | Release created successfully: 7 | 8 | https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG} 9 | 10 | https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG} 11 | EOF 12 | 13 | gh pr comment ${PR_NUMBER} --body-file /tmp/body-file.txt 14 | 15 | echo "::notice:: DSPO Release: https://github.com/${GH_ORG}/data-science-pipelines-operator/releases/tag/${TARGET_VERSION_TAG}" 16 | echo "::notice:: DSP Release: https://github.com/${GH_ORG}/data-science-pipelines/releases/tag/${TARGET_VERSION_TAG}" 17 | echo "::notice:: Feedback sent to PR." 18 | -------------------------------------------------------------------------------- /.github/scripts/release_create/validate_pr.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | echo "::notice:: Performing Release PR Validation for: ${PR_NUMBER}" 6 | 7 | # Retrieve PR Author: 8 | PR_AUTHOR=$(gh pr view ${PR_NUMBER} --json author -q .author.login) 9 | 10 | echo "Current OWNERS:" 11 | cat ./OWNERS 12 | 13 | echo "::notice:: Checking if PR author ${PR_AUTHOR} is DSPO Owner..." 14 | 15 | is_owner=$(cat ./OWNERS | var=${PR_AUTHOR} yq '[.approvers] | contains([env(var)])') 16 | if [[ $is_owner == "false" ]]; then 17 | echo "::error:: PR author ${PR_AUTHOR} is not an approver in OWNERS file. Only approvers can create releases." 
18 | exit 1 19 | fi 20 | 21 | echo "::notice:: PR author ${PR_AUTHOR} is an approver in DSPO OWNERS." 22 | 23 | echo "::notice:: Validation successful." 24 | -------------------------------------------------------------------------------- /.github/scripts/release_create/vars.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | cat ./config.yaml 6 | target_version_tag=$(yq .target_version_tag ./config.yaml) 7 | previous_version_tag=$(yq .previous_release_tag ./config.yaml) 8 | release_branch=$(yq .release_branch ./config.yaml) 9 | odh_org=$(yq .odh_org ./config.yaml) 10 | pr_number=$(cat ./pr_number) 11 | 12 | echo "pr_number=${pr_number}" >> $GITHUB_OUTPUT 13 | echo "target_version_tag=${target_version_tag}" >> $GITHUB_OUTPUT 14 | echo "previous_version_tag=${previous_version_tag}" >> $GITHUB_OUTPUT 15 | echo "release_branch=${release_branch}" >> $GITHUB_OUTPUT 16 | echo "odh_org=${odh_org}" >> $GITHUB_OUTPUT 17 | -------------------------------------------------------------------------------- /.github/scripts/release_prep/create_branches.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | echo "Cut branch ${MINOR_RELEASE_BRANCH} from main/master" 6 | 7 | echo "Current branches in ${DSPO_REPOSITORY_FULL}" 8 | git branch -r 9 | 10 | git checkout -B ${MINOR_RELEASE_BRANCH} 11 | git push origin ${MINOR_RELEASE_BRANCH} 12 | echo "::notice:: Created DSPO ${MINOR_RELEASE_BRANCH} branch" 13 | 14 | echo "Current branches in ${DSP_REPOSITORY_FULL}" 15 | DSP_DIR=$(dirname ${WORKING_DIR})/data-science-pipelines 16 | git clone \ 17 | --depth=1 \ 18 | --branch=master \ 19 | https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_REPOSITORY_FULL} \ 20 | ${DSP_DIR} 21 | pushd ${DSP_DIR} 22 | git checkout -B ${MINOR_RELEASE_BRANCH} 23 | git push origin ${MINOR_RELEASE_BRANCH} 24 | echo "::notice:: Created DSP 
${MINOR_RELEASE_BRANCH} branch" 25 | popd 26 | 27 | echo "Current branches in ${DSP_PIPELINES_REPOSITORY_FULL}" 28 | DSP_PIPELINES_DIR=$(dirname ${WORKING_DIR})/ilab-on-ocp 29 | git clone \ 30 | --depth=1 \ 31 | --branch=main \ 32 | https://${GH_USER_NAME}:${GH_TOKEN}@github.com/${DSP_PIPELINES_REPOSITORY_FULL} \ 33 | ${DSP_PIPELINES_DIR} 34 | pushd ${DSP_PIPELINES_DIR} 35 | git checkout -B ${MINOR_RELEASE_BRANCH} 36 | git push origin ${MINOR_RELEASE_BRANCH} 37 | echo "::notice:: Created DSP Pipelines ${MINOR_RELEASE_BRANCH} branch" 38 | popd 39 | -------------------------------------------------------------------------------- /.github/scripts/release_prep/templates/config.yaml: -------------------------------------------------------------------------------- 1 | odh_org: placeholder 2 | release_branch: placeholder 3 | target_version_tag: placeholder 4 | previous_release_tag: placeholder 5 | -------------------------------------------------------------------------------- /.github/scripts/release_trigger/upload-data.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -ex 3 | set -o pipefail 4 | 5 | mkdir -p ./pr 6 | 7 | cat <> /tmp/body-file-raw.txt 8 | ${PR_BODY} 9 | EOF 10 | 11 | sed -n '/^```yaml/,/^```/ p' < /tmp/body-file-raw.txt | sed '/^```/ d' > ./pr/config.yaml 12 | echo Parsed config from PR body: 13 | yq ./pr/config.yaml 14 | 15 | # Also store pr details 16 | echo ${PR_NUMBER} >> ./pr/pr_number 17 | echo ${PR_STATE} >> ./pr/pr_state 18 | echo ${PR_HEAD_SHA} >> ./pr/head_sha 19 | -------------------------------------------------------------------------------- /.github/scripts/tests/collect_logs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | DSPA_NS="" 6 | DSPO_NS="" 7 | 8 | while [[ "$#" -gt 0 ]]; do 9 | case $1 in 10 | --dspa-ns) DSPA_NS="$2"; shift ;; 11 | --dspo-ns) DSPO_NS="$2"; shift ;; 12 | *) echo 
"Unknown parameter passed: $1"; exit 1 ;; 13 | esac 14 | shift 15 | done 16 | 17 | if [[ -z "$DSPA_NS" || -z "$DSPO_NS" ]]; then 18 | echo "Both --dspa-ns and --dspo-ns parameters are required." 19 | exit 1 20 | fi 21 | 22 | function check_namespace { 23 | if ! kubectl get namespace "$1" &>/dev/null; then 24 | echo "Namespace '$1' does not exist." 25 | exit 1 26 | fi 27 | } 28 | 29 | function display_pod_info { 30 | local NAMESPACE=$1 31 | local POD_NAMES 32 | 33 | POD_NAMES=$(kubectl -n "${DSPA_NS}" get pods -o custom-columns=":metadata.name") 34 | 35 | if [[ -z "${POD_NAMES}" ]]; then 36 | echo "No pods found in namespace '${NAMESPACE}'." 37 | return 38 | fi 39 | 40 | for POD_NAME in ${POD_NAMES}; do 41 | echo "===== Pod: ${POD_NAME} in ${NAMESPACE} =====" 42 | 43 | echo "----- EVENTS -----" 44 | kubectl describe pod "${POD_NAME}" -n "${NAMESPACE}" | grep -A 100 Events || echo "No events found for pod ${POD_NAME}." 45 | 46 | echo "----- LOGS -----" 47 | kubectl logs "${POD_NAME}" -n "${NAMESPACE}" || echo "No logs found for pod ${POD_NAME}." 48 | 49 | echo "===========================" 50 | echo "" 51 | done 52 | } 53 | 54 | function collect_workflow_info { 55 | local NAMESPACE=$1 56 | 57 | echo "===== Collecting Argo Workflows in ${NAMESPACE} " 58 | # List all workflows 59 | kubectl -n "${NAMESPACE}" get workflows || echo "No workflows found in namespace '${NAMESPACE}'." 60 | 61 | # Display detailed workflow YAML 62 | kubectl -n "${NAMESPACE}" get workflow -o yaml || echo "Failed to retrieve workflows in '${NAMESPACE}'." 
63 | 64 | echo "=====================================================" 65 | echo "" 66 | } 67 | 68 | check_namespace "$DSPA_NS" 69 | check_namespace "$DSPO_NS" 70 | 71 | display_pod_info "$DSPA_NS" 72 | display_pod_info "$DSPO_NS" 73 | 74 | # Collect Argo Workflows for DSPA namespace 75 | collect_workflow_info "$DSPA_NS" 76 | -------------------------------------------------------------------------------- /.github/workflows/build-main.yml: -------------------------------------------------------------------------------- 1 | name: Build images for Main branch 2 | on: 3 | push: 4 | branches: 5 | - main 6 | concurrency: 7 | group: ${{ github.workflow }} 8 | cancel-in-progress: true 9 | env: 10 | IMAGE_REPO_DSPO: data-science-pipelines-operator 11 | QUAY_ORG: opendatahub 12 | QUAY_ID: ${{ secrets.QUAY_ID }} 13 | QUAY_TOKEN: ${{ secrets.QUAY_TOKEN }} 14 | SOURCE_BRANCH: main 15 | jobs: 16 | build-image: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v3 20 | - name: Generate Tag 21 | shell: bash 22 | id: tags 23 | run: | 24 | commit_sha=${{ github.event.after }} 25 | tag=main-${commit_sha:0:7} 26 | echo "tag=${tag}" >> $GITHUB_OUTPUT 27 | - name: Build Image 28 | uses: ./.github/actions/build 29 | env: 30 | IMG: quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }}:${{ steps.tags.outputs.tag }} 31 | TARGET_IMAGE_TAG: ${{ steps.tags.outputs.tag }} 32 | with: 33 | OVERWRITE: true 34 | IMAGE_REPO: ${{ env.IMAGE_REPO_DSPO }} 35 | DOCKERFILE: Dockerfile 36 | GH_REPO: ${{ github.repository }} 37 | - name: Tag latest 38 | shell: bash 39 | env: 40 | IMG: quay.io/${{ env.QUAY_ORG }}/${{ env.IMAGE_REPO_DSPO }} 41 | NEWEST_TAG: ${{ steps.tags.outputs.tag }} 42 | run: | 43 | podman tag ${IMG}:${NEWEST_TAG} ${IMG}:latest 44 | podman push ${IMG}:latest 45 | podman tag ${IMG}:${NEWEST_TAG} ${IMG}:main 46 | podman push ${IMG}:main 47 | -------------------------------------------------------------------------------- /.github/workflows/build-prs-trigger.yaml: 
-------------------------------------------------------------------------------- 1 | name: Trigger build images for PRs 2 | on: 3 | pull_request: 4 | paths: 5 | - .github/workflows/build-prs-trigger.yaml 6 | - go.mod 7 | - go.sum 8 | - controllers/** 9 | - api/** 10 | - config/** 11 | types: 12 | - opened 13 | - reopened 14 | - closed 15 | - synchronize 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 18 | cancel-in-progress: true 19 | jobs: 20 | upload-data: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: actions/checkout@v2 24 | - name: Save PR payload 25 | shell: bash 26 | run: | 27 | mkdir -p ./pr 28 | echo ${{ github.event.pull_request.number }} >> ./pr/pr_number 29 | echo ${{ github.event.pull_request.state }} >> ./pr/pr_state 30 | echo ${{ github.event.pull_request.head.sha }} >> ./pr/head_sha 31 | echo ${{ github.event.action }} >> ./pr/event_action 32 | - uses: actions/upload-artifact@v4 33 | with: 34 | name: pr 35 | path: pr/ 36 | -------------------------------------------------------------------------------- /.github/workflows/functests.yml: -------------------------------------------------------------------------------- 1 | name: Functional Tests 2 | on: 3 | pull_request: 4 | jobs: 5 | functest: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v3 9 | - name: Setup Go 10 | uses: actions/setup-go@v4 11 | with: 12 | go-version: '1.21.x' 13 | - name: Run Functional Tests 14 | env: 15 | SSL_CERT_FILE: ${{ github.workspace }}/controllers/testdata/tls/ca-bundle.crt 16 | run: make functest 17 | -------------------------------------------------------------------------------- /.github/workflows/kind-integration.yml: -------------------------------------------------------------------------------- 1 | name: KinD DSPO/DSP Integration Tests 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | paths: 7 | - go.mod 8 | - go.sum 9 | - controllers/** 10 | - api/** 11 | - config/** 12 | - 
tests/** 13 | - .github/resources/** 14 | - .github/actions/** 15 | - '.github/workflows/kind-integration.yml' 16 | - '.github/scripts/tests/tests.sh' 17 | - '.github/scripts/python_package_upload/**' 18 | - Makefile 19 | types: 20 | - opened 21 | - reopened 22 | - closed 23 | - synchronize 24 | 25 | concurrency: 26 | group: ${{ github.head_ref }}-${{ github.workflow }} 27 | cancel-in-progress: true 28 | 29 | env: 30 | GIT_WORKSPACE: ${{ github.workspace }} 31 | 32 | jobs: 33 | dspo-tests: 34 | runs-on: ubuntu-latest 35 | timeout-minutes: 60 36 | 37 | steps: 38 | - uses: actions/checkout@v3 39 | with: 40 | fetch-depth: 0 41 | 42 | - name: Set up Go 43 | uses: actions/setup-go@v1 44 | with: 45 | go-version: '1.21.x' 46 | id: go 47 | 48 | - name: Setup and start KinD cluster 49 | uses: ./.github/actions/kind 50 | 51 | - name: Run test 52 | id: test 53 | working-directory: ${{ github.workspace }}/.github/scripts/tests 54 | run: | 55 | sh tests.sh --kind 56 | continue-on-error: true 57 | 58 | - name: Collect events and logs 59 | if: steps.test.outcome != 'success' 60 | working-directory: ${{ github.workspace }}/.github/scripts/tests 61 | run: | 62 | ./collect_logs.sh --dspa-ns test-dspa --dspo-ns opendatahub 63 | ./collect_logs.sh --dspa-ns dspa-ext --dspo-ns opendatahub 64 | exit 1 65 | -------------------------------------------------------------------------------- /.github/workflows/nightly_tests.yml: -------------------------------------------------------------------------------- 1 | name: Nightly Testing 2 | on: 3 | workflow_dispatch: {} 4 | schedule: 5 | - cron: 0 0 * * * 6 | jobs: 7 | nightly: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v3 11 | - name: Setup Go 12 | uses: actions/setup-go@v4 13 | with: 14 | go-version: '1.19.x' 15 | - name: Run Build 16 | run: make build 17 | - name: Run Unit Tests 18 | run: make unittest 19 | - name: Run Functional Tests 20 | run: make functest 21 | # TODO: implement some sort of notification mechanism 
22 | -------------------------------------------------------------------------------- /.github/workflows/precommit.yml: -------------------------------------------------------------------------------- 1 | name: Pre-commit 2 | on: 3 | pull_request: 4 | jobs: 5 | precommit: 6 | runs-on: ubuntu-latest 7 | container: 8 | image: quay.io/opendatahub/pre-commit-go-toolchain:v0.5 9 | env: 10 | XDG_CACHE_HOME: /cache 11 | GOCACHE: /cache/go-build 12 | GOMODCACHE: /cache/go-mod 13 | PRE_COMMIT_HOME: /cache/pre-commit 14 | volumes: 15 | - /cache 16 | steps: 17 | - uses: actions/checkout@v3 18 | - name: Activate cache 19 | uses: actions/cache@v4 20 | with: 21 | path: /cache 22 | key: ${{ runner.os }}-cache-${{ hashFiles('**/go.sum', '.pre-commit-config.yaml') }} 23 | - name: Mark source directory as safe 24 | run: git config --global --add safe.directory $GITHUB_WORKSPACE 25 | - name: Run pre-commit checks 26 | run: pre-commit run --all-files 27 | -------------------------------------------------------------------------------- /.github/workflows/release_trigger.yaml: -------------------------------------------------------------------------------- 1 | name: "Release Trigger Create" # This is used by release_create.yaml on.workflow_run.workflows, change with caution 2 | on: 3 | pull_request: 4 | types: 5 | - closed 6 | paths: 7 | - config/base/params.env 8 | concurrency: 9 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 10 | cancel-in-progress: true 11 | jobs: 12 | upload-data: 13 | runs-on: ubuntu-latest 14 | if: contains(github.event.pull_request.labels.*.name, 'release-automation') && github.event.pull_request.merged 15 | steps: 16 | - uses: actions/checkout@v3 17 | - name: Save PR payload 18 | shell: bash 19 | env: 20 | PR_BODY: ${{github.event.pull_request.body}} 21 | PR_NUMBER: ${{ github.event.pull_request.number }} 22 | PR_STATE: ${{ github.event.pull_request.state }} 23 | PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} 24 | 
run: ./.github/scripts/release_trigger/upload-data.sh 25 | - uses: actions/upload-artifact@v4 26 | with: 27 | name: pr 28 | path: pr/ 29 | -------------------------------------------------------------------------------- /.github/workflows/unittests.yml: -------------------------------------------------------------------------------- 1 | name: Unit Tests 2 | on: 3 | pull_request: 4 | jobs: 5 | unittest: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v3 9 | - name: Setup Go 10 | uses: actions/setup-go@v4 11 | with: 12 | go-version: '1.21.x' 13 | - name: Run Unit Tests 14 | run: make unittest 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Binaries for programs and plugins 3 | *.exe 4 | *.exe~ 5 | *.dll 6 | *.so 7 | *.dylib 8 | bin 9 | testbin/* 10 | Dockerfile.cross 11 | 12 | # Test binary, build with `go test -c` 13 | *.test 14 | 15 | # Output of the go coverage tool, specifically when used with LiteIDE 16 | *.out 17 | 18 | # Kubernetes Generated files - skip generated files, except for vendored files 19 | 20 | !vendor/**/zz_generated.* 21 | 22 | # editor and IDE paraphernalia 23 | .idea 24 | *.swp 25 | *.swo 26 | *~ 27 | .odo 28 | *.code-workspace 29 | *.vscode 30 | 31 | .DS_Store 32 | 33 | # Byte-compiled / optimized / DLL files 34 | __pycache__/ 35 | *.py[cod] 36 | *$py.class 37 | -------------------------------------------------------------------------------- /.gitleaks.toml: -------------------------------------------------------------------------------- 1 | [allowlist] 2 | description = "Allowlist for files and paths" 3 | files = [ 4 | ".github/resources/mariadb/certs-secret.yaml", 5 | ".github/resources/mariadb/self-signed-ca-configmap.yaml", 6 | ".github/resources/mariadb/tls-config-configmap.yaml", 7 | ".github/resources/minio/certs-secret.yaml", 8 | 
".github/resources/minio/cabundle-configmap.yaml", 9 | ".github/resources/tls/root-ca-configmap.yaml", 10 | ".github/resources/pypiserver/nginx-certs.yaml", 11 | ".github/resources/pypiserver/nginx-tls-config.yaml" 12 | ] 13 | -------------------------------------------------------------------------------- /.golangci.yaml: -------------------------------------------------------------------------------- 1 | run: 2 | timeout: 5m 3 | linters: 4 | enable: 5 | - errcheck 6 | - gosimple 7 | - govet 8 | - ineffassign 9 | - staticcheck 10 | - typecheck 11 | - unused 12 | - revive 13 | issues: 14 | exclude-rules: 15 | - linters: 16 | - staticcheck 17 | path: controllers/dspipeline_params.go 18 | text: SA1019 # exclude failures for deprecated warnings 19 | linters-settings: 20 | revive: 21 | rules: 22 | - name: dot-imports 23 | severity: warning 24 | disabled: true 25 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v3.3.0 4 | hooks: 5 | - id: trailing-whitespace 6 | exclude: README.md 7 | - id: check-merge-conflict 8 | - id: end-of-file-fixer 9 | exclude: controllers/testdata/tls 10 | - id: check-added-large-files 11 | - id: check-case-conflict 12 | - id: check-json 13 | - id: check-symlinks 14 | - id: detect-private-key 15 | 16 | - repo: https://github.com/adrienverge/yamllint.git 17 | rev: v1.25.0 18 | hooks: 19 | - id: yamllint 20 | files: \.(yaml|yml)$ 21 | types: [file, yaml] 22 | entry: yamllint --strict -c .yamllint.yaml 23 | 24 | - repo: https://github.com/dnephin/pre-commit-golang 25 | rev: c17f835cf9 26 | hooks: 27 | - id: go-fmt 28 | - id: golangci-lint 29 | - id: go-build 30 | - id: go-mod-tidy 31 | -------------------------------------------------------------------------------- /.yamllint.yaml: 
-------------------------------------------------------------------------------- 1 | extends: default 2 | rules: 3 | line-length: disable 4 | document-start: disable 5 | indentation: 6 | indent-sequences: whatever 7 | comments-indentation: disable 8 | hyphens: 9 | max-spaces-after: 4 10 | truthy: 11 | check-keys: false 12 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Build the manager binary 2 | FROM registry.access.redhat.com/ubi9/go-toolset:1.22 as builder 3 | ARG TARGETOS 4 | ARG TARGETARCH 5 | 6 | WORKDIR /workspace 7 | # Copy the Go Modules manifests 8 | COPY go.mod go.mod 9 | COPY go.sum go.sum 10 | # cache deps before building and copying source so that we don't need to re-download as much 11 | # and so that source changes don't invalidate our downloaded layer 12 | RUN go mod download 13 | 14 | # Copy the go source 15 | COPY main.go main.go 16 | COPY api/ api/ 17 | COPY controllers/ controllers/ 18 | 19 | # Build 20 | # the GOARCH has not a default value to allow the binary be built according to the host where the command 21 | # was called. For example, if we call make docker-build in a local env which has the Apple Silicon M1 SO 22 | # the docker BUILDPLATFORM arg will be linux/arm64 when for Apple x86 it will be linux/amd64. Therefore, 23 | # by leaving it empty we can ensure that the container and binary shipped on it will have the same platform. 24 | USER root 25 | RUN CGO_ENABLED=1 GOOS=${TARGETOS:-linux} GOARCH=${TARGETARCH:-amd64} GO111MODULE=on GOEXPERIMENT=strictfipsruntime go build -tags strictfipsruntime -a -o manager main.go 26 | 27 | FROM registry.access.redhat.com/ubi9/ubi-minimal:latest 28 | WORKDIR / 29 | COPY --from=builder /workspace/manager . 
30 | COPY config/internal config/internal 31 | 32 | ENTRYPOINT ["/manager"] 33 | -------------------------------------------------------------------------------- /OWNERS: -------------------------------------------------------------------------------- 1 | approvers: 2 | - anishasthana 3 | - DharmitD 4 | - dsp-developers 5 | - gmfrasca 6 | - hbelmiro 7 | - HumairAK 8 | - rimolive 9 | - mprahl 10 | reviewers: 11 | - DharmitD 12 | - gmfrasca 13 | - hbelmiro 14 | - HumairAK 15 | - rimolive 16 | - VaniHaripriya 17 | - mprahl 18 | emeritus_approvers: 19 | - accorvin 20 | - harshad16 21 | -------------------------------------------------------------------------------- /PROJECT: -------------------------------------------------------------------------------- 1 | domain: opendatahub.io 2 | layout: 3 | - go.kubebuilder.io/v3 4 | plugins: 5 | manifests.sdk.operatorframework.io/v2: {} 6 | scorecard.sdk.operatorframework.io/v2: {} 7 | projectName: data-science-pipelines-operator 8 | repo: github.com/opendatahub-io/data-science-pipelines-operator 9 | resources: 10 | - api: 11 | crdVersion: v1 12 | namespaced: true 13 | controller: true 14 | domain: opendatahub.io 15 | group: datasciencepipelinesapplications 16 | kind: DataSciencePipelinesApplication 17 | path: github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1 18 | version: v1alpha1 19 | version: "3" 20 | -------------------------------------------------------------------------------- /api/v1/groupversion_info.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | // Package v1 contains API Schema definitions for the datasciencepipelinesapplications v1 API group 18 | // +kubebuilder:object:generate=true 19 | // +groupName=datasciencepipelinesapplications.opendatahub.io 20 | package v1 21 | 22 | import ( 23 | "k8s.io/apimachinery/pkg/runtime/schema" 24 | "sigs.k8s.io/controller-runtime/pkg/scheme" 25 | ) 26 | 27 | var ( 28 | // GroupVersion is group version used to register these objects 29 | GroupVersion = schema.GroupVersion{Group: "datasciencepipelinesapplications.opendatahub.io", Version: "v1"} 30 | 31 | // SchemeBuilder is used to add go types to the GroupVersionKind scheme 32 | SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} 33 | 34 | // AddToScheme adds the types in this group-version to the given scheme. 35 | AddToScheme = SchemeBuilder.AddToScheme 36 | ) 37 | -------------------------------------------------------------------------------- /api/v1alpha1/groupversion_info.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | // Package v1alpha1 contains API Schema definitions for the datasciencepipelinesapplications v1alpha1 API group 18 | // +kubebuilder:object:generate=true 19 | // +groupName=datasciencepipelinesapplications.opendatahub.io 20 | package v1alpha1 21 | 22 | import ( 23 | "k8s.io/apimachinery/pkg/runtime/schema" 24 | "sigs.k8s.io/controller-runtime/pkg/scheme" 25 | ) 26 | 27 | var ( 28 | // GroupVersion is group version used to register these objects 29 | GroupVersion = schema.GroupVersion{Group: "datasciencepipelinesapplications.opendatahub.io", Version: "v1alpha1"} 30 | 31 | // SchemeBuilder is used to add go types to the GroupVersionKind scheme 32 | SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion} 33 | 34 | // AddToScheme adds the types in this group-version to the given scheme. 35 | AddToScheme = SchemeBuilder.AddToScheme 36 | ) 37 | -------------------------------------------------------------------------------- /config/argo/clusterrole.argo-aggregate-to-admin.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: ClusterRole 4 | metadata: 5 | labels: 6 | rbac.authorization.k8s.io/aggregate-to-admin: "true" 7 | name: argo-aggregate-to-admin 8 | annotations: 9 | internal.kpt.dev/upstream-identifier: "rbac.authorization.k8s.io|ClusterRole|default|argo-aggregate-to-admin" 10 | rules: 11 | - apiGroups: 12 | - argoproj.io 13 | resources: 14 | - workflows 15 | - workflows/finalizers 16 | - workfloweventbindings 17 | - workfloweventbindings/finalizers 18 | - workflowtemplates 19 | - workflowtemplates/finalizers 20 | - cronworkflows 21 | - cronworkflows/finalizers 22 | - clusterworkflowtemplates 23 | - clusterworkflowtemplates/finalizers 24 | - workflowtasksets 25 | - workflowtasksets/finalizers 26 | - workflowtaskresults 27 | - 
workflowtaskresults/finalizers 28 | verbs: 29 | - create 30 | - delete 31 | - deletecollection 32 | - get 33 | - list 34 | - patch 35 | - update 36 | - watch 37 | -------------------------------------------------------------------------------- /config/argo/clusterrole.argo-aggregate-to-edit.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: ClusterRole 4 | metadata: 5 | labels: 6 | rbac.authorization.k8s.io/aggregate-to-edit: "true" 7 | name: argo-aggregate-to-edit 8 | annotations: 9 | internal.kpt.dev/upstream-identifier: "rbac.authorization.k8s.io|ClusterRole|default|argo-aggregate-to-edit" 10 | rules: 11 | - apiGroups: 12 | - argoproj.io 13 | resources: 14 | - workflows 15 | - workflows/finalizers 16 | - workfloweventbindings 17 | - workfloweventbindings/finalizers 18 | - workflowtemplates 19 | - workflowtemplates/finalizers 20 | - cronworkflows 21 | - cronworkflows/finalizers 22 | - clusterworkflowtemplates 23 | - clusterworkflowtemplates/finalizers 24 | - workflowtaskresults 25 | - workflowtaskresults/finalizers 26 | verbs: 27 | - create 28 | - delete 29 | - deletecollection 30 | - get 31 | - list 32 | - patch 33 | - update 34 | - watch 35 | -------------------------------------------------------------------------------- /config/argo/clusterrole.argo-aggregate-to-view.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: ClusterRole 4 | metadata: 5 | labels: 6 | rbac.authorization.k8s.io/aggregate-to-view: "true" 7 | name: argo-aggregate-to-view 8 | annotations: 9 | internal.kpt.dev/upstream-identifier: "rbac.authorization.k8s.io|ClusterRole|default|argo-aggregate-to-view" 10 | rules: 11 | - apiGroups: 12 | - argoproj.io 13 | resources: 14 | - workflows 15 | - workflows/finalizers 16 | - workfloweventbindings 17 | - workfloweventbindings/finalizers 18 | - 
workflowtemplates 19 | - workflowtemplates/finalizers 20 | - cronworkflows 21 | - cronworkflows/finalizers 22 | - clusterworkflowtemplates 23 | - clusterworkflowtemplates/finalizers 24 | - workflowtaskresults 25 | - workflowtaskresults/finalizers 26 | verbs: 27 | - get 28 | - list 29 | - watch 30 | -------------------------------------------------------------------------------- /config/argo/clusterrolebinding.argo-binding.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: ClusterRoleBinding 4 | metadata: 5 | name: argo-binding 6 | roleRef: 7 | apiGroup: rbac.authorization.k8s.io 8 | kind: ClusterRole 9 | name: argo-cluster-role 10 | subjects: 11 | - kind: ServiceAccount 12 | name: argo 13 | namespace: argo 14 | -------------------------------------------------------------------------------- /config/argo/configmap.workflow-controller-configmap.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ConfigMap 4 | metadata: 5 | name: workflow-controller-configmap 6 | namespace: argo 7 | -------------------------------------------------------------------------------- /config/argo/crd.clusterworkflowtemplates.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | annotations: 5 | internal.kpt.dev/upstream-identifier: apiextensions.k8s.io|CustomResourceDefinition|default|clusterworkflowtemplates.argoproj.io 6 | name: clusterworkflowtemplates.argoproj.io 7 | spec: 8 | group: argoproj.io 9 | names: 10 | kind: ClusterWorkflowTemplate 11 | listKind: ClusterWorkflowTemplateList 12 | plural: clusterworkflowtemplates 13 | shortNames: 14 | - clusterwftmpl 15 | - cwft 16 | singular: clusterworkflowtemplate 17 | scope: Cluster 18 | versions: 19 | - name: v1alpha1 20 | schema: 21 | 
openAPIV3Schema: 22 | properties: 23 | apiVersion: 24 | type: string 25 | kind: 26 | type: string 27 | metadata: 28 | type: object 29 | spec: 30 | type: object 31 | x-kubernetes-map-type: atomic 32 | x-kubernetes-preserve-unknown-fields: true 33 | required: 34 | - metadata 35 | - spec 36 | type: object 37 | served: true 38 | storage: true 39 | -------------------------------------------------------------------------------- /config/argo/crd.cronworkflows.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | annotations: 5 | internal.kpt.dev/upstream-identifier: apiextensions.k8s.io|CustomResourceDefinition|default|cronworkflows.argoproj.io 6 | name: cronworkflows.argoproj.io 7 | spec: 8 | group: argoproj.io 9 | names: 10 | kind: CronWorkflow 11 | listKind: CronWorkflowList 12 | plural: cronworkflows 13 | shortNames: 14 | - cwf 15 | - cronwf 16 | singular: cronworkflow 17 | scope: Namespaced 18 | versions: 19 | - name: v1alpha1 20 | schema: 21 | openAPIV3Schema: 22 | properties: 23 | apiVersion: 24 | type: string 25 | kind: 26 | type: string 27 | metadata: 28 | type: object 29 | spec: 30 | type: object 31 | x-kubernetes-map-type: atomic 32 | x-kubernetes-preserve-unknown-fields: true 33 | status: 34 | type: object 35 | x-kubernetes-map-type: atomic 36 | x-kubernetes-preserve-unknown-fields: true 37 | required: 38 | - metadata 39 | - spec 40 | type: object 41 | served: true 42 | storage: true 43 | -------------------------------------------------------------------------------- /config/argo/crd.viewers.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | labels: 5 | kubeflow/crd-install: "true" 6 | name: viewers.kubeflow.org 7 | spec: 8 | group: kubeflow.org 9 | names: 10 | kind: Viewer 11 | listKind: ViewerList 12 | 
plural: viewers 13 | shortNames: 14 | - vi 15 | singular: viewer 16 | scope: Namespaced 17 | versions: 18 | - name: v1beta1 19 | schema: 20 | openAPIV3Schema: 21 | properties: 22 | apiVersion: 23 | type: string 24 | kind: 25 | type: string 26 | metadata: 27 | type: object 28 | spec: 29 | type: object 30 | x-kubernetes-map-type: atomic 31 | x-kubernetes-preserve-unknown-fields: true 32 | required: 33 | - spec 34 | type: object 35 | served: true 36 | storage: true 37 | -------------------------------------------------------------------------------- /config/argo/crd.workflowartifactgctasks.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | name: workflowartifactgctasks.argoproj.io 5 | spec: 6 | group: argoproj.io 7 | names: 8 | kind: WorkflowArtifactGCTask 9 | listKind: WorkflowArtifactGCTaskList 10 | plural: workflowartifactgctasks 11 | shortNames: 12 | - wfat 13 | singular: workflowartifactgctask 14 | scope: Namespaced 15 | versions: 16 | - name: v1alpha1 17 | schema: 18 | openAPIV3Schema: 19 | properties: 20 | apiVersion: 21 | type: string 22 | kind: 23 | type: string 24 | metadata: 25 | type: object 26 | spec: 27 | type: object 28 | x-kubernetes-map-type: atomic 29 | x-kubernetes-preserve-unknown-fields: true 30 | status: 31 | type: object 32 | x-kubernetes-map-type: atomic 33 | x-kubernetes-preserve-unknown-fields: true 34 | required: 35 | - metadata 36 | - spec 37 | type: object 38 | served: true 39 | storage: true 40 | subresources: 41 | status: {} 42 | -------------------------------------------------------------------------------- /config/argo/crd.workfloweventbinding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | annotations: 5 | internal.kpt.dev/upstream-identifier: 
apiextensions.k8s.io|CustomResourceDefinition|default|workfloweventbindings.argoproj.io 6 | name: workfloweventbindings.argoproj.io 7 | spec: 8 | group: argoproj.io 9 | names: 10 | kind: WorkflowEventBinding 11 | listKind: WorkflowEventBindingList 12 | plural: workfloweventbindings 13 | shortNames: 14 | - wfeb 15 | singular: workfloweventbinding 16 | scope: Namespaced 17 | versions: 18 | - name: v1alpha1 19 | schema: 20 | openAPIV3Schema: 21 | properties: 22 | apiVersion: 23 | type: string 24 | kind: 25 | type: string 26 | metadata: 27 | type: object 28 | spec: 29 | type: object 30 | x-kubernetes-map-type: atomic 31 | x-kubernetes-preserve-unknown-fields: true 32 | required: 33 | - metadata 34 | - spec 35 | type: object 36 | served: true 37 | storage: true 38 | -------------------------------------------------------------------------------- /config/argo/crd.workflows.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | annotations: 5 | internal.kpt.dev/upstream-identifier: "apiextensions.k8s.io|CustomResourceDefinition|default|workflows.argoproj.io" 6 | name: workflows.argoproj.io 7 | spec: 8 | group: argoproj.io 9 | names: 10 | kind: Workflow 11 | listKind: WorkflowList 12 | plural: workflows 13 | shortNames: 14 | - wf 15 | singular: workflow 16 | scope: Namespaced 17 | versions: 18 | - additionalPrinterColumns: 19 | - description: Status of the workflow 20 | jsonPath: .status.phase 21 | name: Status 22 | type: string 23 | - description: When the workflow was started 24 | format: date-time 25 | jsonPath: .status.startedAt 26 | name: Age 27 | type: date 28 | - description: Human readable message indicating details about why the workflow is in this condition. 
29 | jsonPath: .status.message 30 | name: Message 31 | type: string 32 | name: v1alpha1 33 | schema: 34 | openAPIV3Schema: 35 | properties: 36 | apiVersion: 37 | type: string 38 | kind: 39 | type: string 40 | metadata: 41 | type: object 42 | spec: 43 | type: object 44 | x-kubernetes-map-type: atomic 45 | x-kubernetes-preserve-unknown-fields: true 46 | status: 47 | type: object 48 | x-kubernetes-map-type: atomic 49 | x-kubernetes-preserve-unknown-fields: true 50 | required: 51 | - metadata 52 | - spec 53 | type: object 54 | served: true 55 | storage: true 56 | subresources: {} 57 | -------------------------------------------------------------------------------- /config/argo/crd.workflowtaskset.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | annotations: 5 | internal.kpt.dev/upstream-identifier: apiextensions.k8s.io|CustomResourceDefinition|default|workflowtasksets.argoproj.io 6 | name: workflowtasksets.argoproj.io 7 | spec: 8 | group: argoproj.io 9 | names: 10 | kind: WorkflowTaskSet 11 | listKind: WorkflowTaskSetList 12 | plural: workflowtasksets 13 | shortNames: 14 | - wfts 15 | singular: workflowtaskset 16 | scope: Namespaced 17 | versions: 18 | - name: v1alpha1 19 | schema: 20 | openAPIV3Schema: 21 | properties: 22 | apiVersion: 23 | type: string 24 | kind: 25 | type: string 26 | metadata: 27 | type: object 28 | spec: 29 | type: object 30 | x-kubernetes-map-type: atomic 31 | x-kubernetes-preserve-unknown-fields: true 32 | status: 33 | type: object 34 | x-kubernetes-map-type: atomic 35 | x-kubernetes-preserve-unknown-fields: true 36 | required: 37 | - metadata 38 | - spec 39 | type: object 40 | served: true 41 | storage: true 42 | subresources: 43 | status: {} 44 | -------------------------------------------------------------------------------- /config/argo/crd.workflowtemplate.yaml: 
-------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | annotations: 5 | internal.kpt.dev/upstream-identifier: apiextensions.k8s.io|CustomResourceDefinition|default|workflowtemplates.argoproj.io 6 | name: workflowtemplates.argoproj.io 7 | spec: 8 | group: argoproj.io 9 | names: 10 | kind: WorkflowTemplate 11 | listKind: WorkflowTemplateList 12 | plural: workflowtemplates 13 | shortNames: 14 | - wftmpl 15 | singular: workflowtemplate 16 | scope: Namespaced 17 | versions: 18 | - name: v1alpha1 19 | schema: 20 | openAPIV3Schema: 21 | properties: 22 | apiVersion: 23 | type: string 24 | kind: 25 | type: string 26 | metadata: 27 | type: object 28 | spec: 29 | type: object 30 | x-kubernetes-map-type: atomic 31 | x-kubernetes-preserve-unknown-fields: true 32 | required: 33 | - metadata 34 | - spec 35 | type: object 36 | served: true 37 | storage: true 38 | -------------------------------------------------------------------------------- /config/argo/deployment.workflow-controller.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: workflow-controller 6 | namespace: argo 7 | spec: 8 | selector: 9 | matchLabels: 10 | app: workflow-controller 11 | template: 12 | metadata: 13 | labels: 14 | app: workflow-controller 15 | spec: 16 | containers: 17 | - args: 18 | - --configmap 19 | - workflow-controller-configmap 20 | - --executor-image 21 | - gcr.io/ml-pipeline/argoexec:v3.4.16-license-compliance 22 | - --namespaced 23 | command: 24 | - workflow-controller 25 | env: 26 | - name: LEADER_ELECTION_IDENTITY 27 | valueFrom: 28 | fieldRef: 29 | apiVersion: v1 30 | fieldPath: metadata.name 31 | # image: quay.io/argoproj/workflow-controller:v3.4.12 32 | image: gcr.io/ml-pipeline/workflow-controller:v3.4.16-license-compliance 33 | livenessProbe: 34 | 
failureThreshold: 3 35 | httpGet: 36 | path: /healthz 37 | port: 6060 38 | initialDelaySeconds: 90 39 | periodSeconds: 60 40 | timeoutSeconds: 30 41 | name: workflow-controller 42 | ports: 43 | - containerPort: 9090 44 | name: metrics 45 | - containerPort: 6060 46 | securityContext: 47 | allowPrivilegeEscalation: false 48 | capabilities: 49 | drop: 50 | - ALL 51 | readOnlyRootFilesystem: true 52 | runAsNonRoot: true 53 | nodeSelector: 54 | kubernetes.io/os: linux 55 | priorityClassName: workflow-controller 56 | securityContext: 57 | runAsNonRoot: true 58 | serviceAccountName: argo 59 | -------------------------------------------------------------------------------- /config/argo/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | namespace: argo 4 | resources: 5 | # Deploy Argo Controller and Server 6 | # TODO: Only deploy server (not WC?) 7 | # - https://github.com/argoproj/argo-workflows/releases/download/v3.4.12/install.yaml 8 | 9 | # The following manifests are used in ALL Executors 10 | - clusterrole.argo-aggregate-to-admin.yaml 11 | - clusterrole.argo-aggregate-to-edit.yaml 12 | - clusterrole.argo-aggregate-to-view.yaml 13 | - clusterrole.argo-cluster-role.yaml 14 | - clusterrolebinding.argo-binding.yaml 15 | - configmap.workflow-controller-configmap.yaml 16 | # - deployment.workflow-controller.yaml 17 | # - priorityclass.yaml 18 | - role.argo.yaml 19 | - rolebinding.argo-binding.yaml 20 | - serviceaccount.argo.yaml 21 | 22 | # CRDs only needed for PNS executors 23 | - crd.applications.yaml 24 | - crd.clusterworkflowtemplates.yaml 25 | - crd.cronworkflows.yaml 26 | - crd.viewers.yaml 27 | - crd.workflowartifactgctasks.yaml 28 | - crd.workfloweventbinding.yaml 29 | - crd.workflows.yaml 30 | - crd.workflowtaskresult.yaml 31 | - crd.workflowtaskset.yaml 32 | - crd.workflowtemplate.yaml 33 | 
-------------------------------------------------------------------------------- /config/argo/params.yaml: -------------------------------------------------------------------------------- 1 | varReference: 2 | - path: spec/template/spec/containers/image 3 | kind: Deployment 4 | - path: data 5 | kind: ConfigMap 6 | -------------------------------------------------------------------------------- /config/argo/priorityclass.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: scheduling.k8s.io/v1 3 | kind: PriorityClass 4 | metadata: 5 | annotations: 6 | internal.kpt.dev/upstream-identifier: scheduling.k8s.io|PriorityClass|default|workflow-controller 7 | labels: 8 | application-crd-id: kubeflow-pipelines 9 | name: ds-pipelines-workflow-controller-priorityclass 10 | value: 1000000 11 | -------------------------------------------------------------------------------- /config/argo/role.argo.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: Role 4 | metadata: 5 | name: argo-role 6 | annotations: 7 | internal.kpt.dev/upstream-identifier: 'rbac.authorization.k8s.io|Role|default|argo-role' 8 | rules: 9 | - apiGroups: 10 | - coordination.k8s.io 11 | resources: 12 | - leases 13 | verbs: 14 | - create 15 | - get 16 | - update 17 | - apiGroups: 18 | - "" 19 | resources: 20 | - pods 21 | - pods/exec 22 | verbs: 23 | - create 24 | - get 25 | - list 26 | - watch 27 | - update 28 | - patch 29 | - delete 30 | - apiGroups: 31 | - "" 32 | resources: 33 | - configmaps 34 | verbs: 35 | - get 36 | - watch 37 | - list 38 | - apiGroups: 39 | - "" 40 | resources: 41 | - persistentvolumeclaims 42 | - persistentvolumeclaims/finalizers 43 | verbs: 44 | - create 45 | - update 46 | - delete 47 | - get 48 | - apiGroups: 49 | - argoproj.io 50 | resources: 51 | - workflows 52 | - workflows/finalizers 53 | - workflowtasksets 54 | - 
workflowtasksets/finalizers 55 | - workflowartifactgctasks 56 | verbs: 57 | - get 58 | - list 59 | - watch 60 | - update 61 | - patch 62 | - delete 63 | - create 64 | - apiGroups: 65 | - argoproj.io 66 | resources: 67 | - workflowtemplates 68 | - workflowtemplates/finalizers 69 | verbs: 70 | - get 71 | - list 72 | - watch 73 | - apiGroups: 74 | - argoproj.io 75 | resources: 76 | - workflowtaskresults 77 | verbs: 78 | - list 79 | - watch 80 | - deletecollection 81 | - apiGroups: 82 | - "" 83 | resources: 84 | - serviceaccounts 85 | verbs: 86 | - get 87 | - list 88 | - apiGroups: 89 | - "" 90 | resources: 91 | - secrets 92 | verbs: 93 | - get 94 | - apiGroups: 95 | - argoproj.io 96 | resources: 97 | - cronworkflows 98 | - cronworkflows/finalizers 99 | verbs: 100 | - get 101 | - list 102 | - watch 103 | - update 104 | - patch 105 | - delete 106 | - apiGroups: 107 | - "" 108 | resources: 109 | - events 110 | verbs: 111 | - create 112 | - patch 113 | - apiGroups: 114 | - "policy" 115 | resources: 116 | - poddisruptionbudgets 117 | verbs: 118 | - create 119 | - get 120 | - delete 121 | -------------------------------------------------------------------------------- /config/argo/rolebinding.argo-binding.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: RoleBinding 4 | metadata: 5 | name: argo-binding 6 | namespace: argo 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: Role 10 | name: argo-role 11 | subjects: 12 | - kind: ServiceAccount 13 | name: argo 14 | namespace: argo 15 | -------------------------------------------------------------------------------- /config/argo/serviceaccount.argo.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: argo 6 | namespace: argo 7 | -------------------------------------------------------------------------------- 
/config/base/params.env: -------------------------------------------------------------------------------- 1 | IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest 2 | IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest 3 | IMAGES_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest 4 | IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest 5 | IMAGES_LAUNCHER=quay.io/opendatahub/ds-pipelines-launcher:latest 6 | IMAGES_DRIVER=quay.io/opendatahub/ds-pipelines-driver:latest 7 | IMAGES_PIPELINESRUNTIMEGENERIC=quay.io/opendatahub/ds-pipelines-runtime-generic:latest 8 | IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller:odh-v3.4.17-1 9 | IMAGES_ARGO_EXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec:odh-v3.4.17-1 10 | IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest 11 | IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel9:2.6 12 | IMAGES_MARIADB=registry.redhat.io/rhel9/mariadb-105:latest 13 | IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy-rhel9:latest 14 | IMAGES_TOOLBOX=registry.redhat.io/ubi9/toolbox:latest 15 | IMAGES_RHELAI=registry.redhat.io/rhelai1/instructlab-nvidia-rhel9:1.5 16 | ZAP_LOG_LEVEL=info 17 | MAX_CONCURRENT_RECONCILES=10 18 | DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT=15s 19 | DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT=15s 20 | DSPO_REQUEUE_TIME=20s 21 | DSPO_APISERVER_INCLUDE_OWNERREFERENCE=true 22 | MANAGEDPIPELINES="{}" 23 | PLATFORMVERSION="v0.0.0" 24 | FIPSENABLED=false 25 | -------------------------------------------------------------------------------- /config/base/params.yaml: -------------------------------------------------------------------------------- 1 | varReference: 2 | - path: data 3 | kind: ConfigMap 4 | - path: spec/template/spec/containers/env/value 5 | kind: Deployment 6 | - path: spec/template/spec/containers/image 7 | kind: Deployment 8 | 
-------------------------------------------------------------------------------- /config/component_metadata.yaml: -------------------------------------------------------------------------------- 1 | releases: 2 | - name: Kubeflow Pipelines 3 | version: 2.5.0 4 | repoUrl: https://github.com/kubeflow/pipelines 5 | -------------------------------------------------------------------------------- /config/configmaps/files/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: $(IMAGES_APISERVER) 3 | PersistenceAgent: $(IMAGES_PERSISTENCEAGENT) 4 | ScheduledWorkflow: $(IMAGES_SCHEDULEDWORKFLOW) 5 | MlmdEnvoy: $(IMAGES_MLMDENVOY) 6 | MlmdGRPC: $(IMAGES_MLMDGRPC) 7 | ArgoExecImage: $(IMAGES_ARGO_EXEC) 8 | ArgoWorkflowController: $(IMAGES_ARGO_WORKFLOWCONTROLLER) 9 | LauncherImage: $(IMAGES_LAUNCHER) 10 | DriverImage: $(IMAGES_DRIVER) 11 | OAuthProxy: $(IMAGES_OAUTHPROXY) 12 | MariaDB: $(IMAGES_MARIADB) 13 | RuntimeGeneric: $(IMAGES_PIPELINESRUNTIMEGENERIC) 14 | Toolbox: $(IMAGES_TOOLBOX) 15 | RHELAI: $(IMAGES_RHELAI) 16 | ManagedPipelinesMetadata: 17 | Instructlab: 18 | Name: InstructLab 19 | Description: InstructLab fine-tunes models using synthetic data generation (SDG) techniques and a structured taxonomy to create diverse, high-quality training datasets. 
20 | Filepath: /config/managed-pipelines/instructlab.yaml 21 | VersionName: InstructLab 22 | Iris: 23 | Name: "[Demo] iris-training" 24 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 25 | Filepath: /samples/iris-pipeline-compiled.yaml 26 | VersionName: "[Demo] iris-training" 27 | DSPO: 28 | HealthCheck: 29 | Database: 30 | ConnectionTimeout: $(DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT) 31 | ObjectStore: 32 | ConnectionTimeout: $(DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT) 33 | RequeueTime: $(DSPO_REQUEUE_TIME) 34 | PlatformVersion: $(PLATFORMVERSION) 35 | FIPSEnabled: $(FIPSENABLED) 36 | -------------------------------------------------------------------------------- /config/configmaps/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | generatorOptions: 4 | disableNameSuffixHash: true 5 | configMapGenerator: 6 | - name: dspo-config 7 | files: 8 | - files/config.yaml 9 | -------------------------------------------------------------------------------- /config/crd/bases/scheduledworkflows.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | labels: 5 | application-crd-id: data-science-pipelines 6 | kubeflow/crd-install: "true" 7 | name: scheduledworkflows.kubeflow.org 8 | spec: 9 | conversion: 10 | strategy: None 11 | group: kubeflow.org 12 | names: 13 | kind: ScheduledWorkflow 14 | listKind: ScheduledWorkflowList 15 | plural: scheduledworkflows 16 | shortNames: 17 | - swf 18 | singular: scheduledworkflow 19 | scope: Namespaced 20 | versions: 21 | - name: v1beta1 22 | schema: 23 | openAPIV3Schema: 24 | properties: 25 | apiVersion: 26 | type: string 27 | kind: 28 | type: string 29 
| metadata: 30 | type: object 31 | spec: 32 | type: object 33 | x-kubernetes-map-type: atomic 34 | x-kubernetes-preserve-unknown-fields: true 35 | status: 36 | type: object 37 | x-kubernetes-map-type: atomic 38 | x-kubernetes-preserve-unknown-fields: true 39 | required: 40 | - spec 41 | - status 42 | type: object 43 | served: true 44 | storage: true 45 | -------------------------------------------------------------------------------- /config/crd/external/monitoring.coreos.com_servicemonitors.yaml: -------------------------------------------------------------------------------- 1 | # This CRD is used for func/kind tests 2 | --- 3 | apiVersion: apiextensions.k8s.io/v1 4 | kind: CustomResourceDefinition 5 | metadata: 6 | name: servicemonitors.monitoring.coreos.com 7 | spec: 8 | group: monitoring.coreos.com 9 | versions: 10 | - name: v1 11 | served: true 12 | storage: true 13 | schema: 14 | openAPIV3Schema: 15 | type: object 16 | properties: 17 | spec: 18 | type: object 19 | required: 20 | - endpoints 21 | - selector 22 | properties: 23 | endpoints: 24 | type: array 25 | items: 26 | type: object 27 | required: 28 | - path 29 | - port 30 | properties: 31 | path: 32 | type: string 33 | port: 34 | type: string 35 | minItems: 1 36 | selector: 37 | type: object 38 | properties: 39 | matchLabels: 40 | type: object 41 | status: 42 | type: object 43 | properties: 44 | observedGeneration: 45 | type: integer 46 | format: int64 47 | scope: Namespaced 48 | names: 49 | plural: servicemonitors 50 | singular: servicemonitor 51 | kind: ServiceMonitor 52 | -------------------------------------------------------------------------------- /config/crd/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml 3 | # +kubebuilder:scaffold:crdkustomizeresource 4 | - bases/scheduledworkflows.yaml 5 | 6 | configurations: 7 | - 
kustomizeconfig.yaml 8 | -------------------------------------------------------------------------------- /config/crd/kustomizeconfig.yaml: -------------------------------------------------------------------------------- 1 | # This file is for teaching kustomize how to substitute name and namespace reference in CRD 2 | nameReference: 3 | - kind: Service 4 | version: v1 5 | fieldSpecs: 6 | - kind: CustomResourceDefinition 7 | version: v1 8 | group: apiextensions.k8s.io 9 | path: spec/conversion/webhook/clientConfig/service/name 10 | 11 | namespace: 12 | - kind: CustomResourceDefinition 13 | version: v1 14 | group: apiextensions.k8s.io 15 | path: spec/conversion/webhook/clientConfig/service/namespace 16 | create: false 17 | 18 | varReference: 19 | - path: metadata/annotations 20 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/kfp_launcher_config.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | data: 3 | {{ if .APIServer.CustomKfpLauncherConfigMap }} 4 | {{.CustomKfpLauncherConfigMapData}} 5 | {{ else }} 6 | {{ if .ObjectStorageConnection.BasePath }} 7 | defaultPipelineRoot: s3://{{.ObjectStorageConnection.Bucket}}/{{.ObjectStorageConnection.BasePath}} 8 | {{ else }} 9 | defaultPipelineRoot: s3://{{.ObjectStorageConnection.Bucket}} 10 | {{ end }} 11 | providers: | 12 | s3: 13 | default: 14 | endpoint: {{.ObjectStorageConnection.Endpoint}} 15 | {{ if .ObjectStorageConnection.Secure }} 16 | disableSSL: false 17 | {{else}} 18 | disableSSL: true 19 | {{end}} 20 | region: {{.ObjectStorageConnection.Region}} 21 | credentials: 22 | {{if .ObjectStorageConnection.CredentialsSecret}} 23 | fromEnv: false 24 | secretRef: 25 | secretName: {{.ObjectStorageConnection.CredentialsSecret.SecretName}} 26 | accessKeyKey: {{.ObjectStorageConnection.CredentialsSecret.AccessKey}} 27 | secretKeyKey: {{.ObjectStorageConnection.CredentialsSecret.SecretKey}} 28 
| {{else}} 29 | fromEnv: true 30 | {{end}} 31 | {{ end }} 32 | kind: ConfigMap 33 | metadata: 34 | name: kfp-launcher 35 | namespace: {{.Namespace}} 36 | labels: 37 | app: ds-pipeline-{{.Name}} 38 | component: data-science-pipelines 39 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/monitor.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: monitoring.coreos.com/v1 2 | kind: ServiceMonitor 3 | metadata: 4 | labels: 5 | app: {{.APIServerDefaultResourceName}} 6 | component: data-science-pipelines 7 | name: {{.APIServerDefaultResourceName}} 8 | namespace: {{.Namespace}} 9 | spec: 10 | endpoints: 11 | - path: /metrics 12 | port: http 13 | selector: 14 | matchLabels: 15 | app: {{.APIServerDefaultResourceName}} 16 | component: data-science-pipelines 17 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/role_ds-pipeline-user-access.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: Role 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | metadata: 4 | name: ds-pipeline-user-access-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | rules: 10 | - apiGroups: 11 | - route.openshift.io 12 | resources: 13 | - routes 14 | verbs: 15 | - get 16 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: Role 3 | metadata: 4 | name: {{.APIServerDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | rules: 10 | - apiGroups: 11 | - "" 12 | resources: 
13 | - pods 14 | - pods/log 15 | verbs: 16 | - get 17 | - list 18 | - delete 19 | - apiGroups: 20 | - "" 21 | resources: 22 | - secrets 23 | verbs: 24 | - get 25 | - list 26 | - apiGroups: 27 | - argoproj.io 28 | resources: 29 | - workflows 30 | verbs: 31 | - create 32 | - get 33 | - list 34 | - watch 35 | - update 36 | - patch 37 | - delete 38 | - apiGroups: 39 | - kubeflow.org 40 | resources: 41 | - scheduledworkflows 42 | verbs: 43 | - create 44 | - get 45 | - list 46 | - update 47 | - patch 48 | - delete 49 | - verbs: 50 | - update 51 | apiGroups: 52 | - kubeflow.org 53 | resources: 54 | - scheduledworkflows/finalizers 55 | - apiGroups: 56 | - authorization.k8s.io 57 | resources: 58 | - subjectaccessreviews 59 | verbs: 60 | - create 61 | - apiGroups: 62 | - authentication.k8s.io 63 | resources: 64 | - tokenreviews 65 | verbs: 66 | - create 67 | - apiGroups: 68 | - image.openshift.io 69 | resources: 70 | - imagestreamtags 71 | verbs: 72 | - get 73 | - apiGroups: 74 | - route.openshift.io 75 | verbs: 76 | - get 77 | resources: 78 | - routes 79 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/rolebinding_ds-pipeline.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: RoleBinding 3 | metadata: 4 | name: {{.APIServerDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | roleRef: 10 | apiGroup: rbac.authorization.k8s.io 11 | kind: Role 12 | name: {{.APIServerDefaultResourceName}} 13 | subjects: 14 | - kind: ServiceAccount 15 | name: {{.APIServerDefaultResourceName}} 16 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/rolebinding_pipeline-runner.yaml.tmpl: -------------------------------------------------------------------------------- 1 | 
apiVersion: rbac.authorization.k8s.io/v1 2 | kind: RoleBinding 3 | metadata: 4 | name: pipeline-runner-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | roleRef: 10 | apiGroup: rbac.authorization.k8s.io 11 | kind: Role 12 | name: pipeline-runner-{{.Name}} 13 | subjects: 14 | - kind: ServiceAccount 15 | name: pipeline-runner-{{.Name}} 16 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/sa_ds-pipeline.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: {{.APIServerDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | annotations: 7 | serviceaccounts.openshift.io/oauth-redirectreference.primary: '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"{{.APIServerDefaultResourceName}}"}}' 8 | labels: 9 | app: {{.APIServerDefaultResourceName}} 10 | component: data-science-pipelines 11 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/sa_pipeline-runner.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: pipeline-runner-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/server-config.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: ds-pipeline-server-config-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | data: 10 | 
config.json: | 11 | { 12 | "DBConfig": { 13 | "MySQLConfig": { 14 | "ExtraParams": {{ .DBConnection.ExtraParams }}, 15 | "GroupConcatMaxLen": "4194304" 16 | }, 17 | "PostgreSQLConfig": {}, 18 | "ConMaxLifeTime": "120s" 19 | }, 20 | "ObjectStoreConfig": { 21 | "PipelinePath": "pipelines" 22 | }, 23 | "DBDriverName": "mysql", 24 | "ARCHIVE_CONFIG_LOG_FILE_NAME": "main.log", 25 | "ARCHIVE_CONFIG_LOG_PATH_PREFIX": "/artifacts", 26 | "InitConnectionTimeout": "6m" 27 | } 28 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/service.ml-pipeline.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: ml-pipeline 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-{{.Name}} 8 | component: data-science-pipelines 9 | spec: 10 | ports: 11 | - name: oauth 12 | port: 8443 13 | protocol: TCP 14 | targetPort: oauth 15 | - name: http 16 | port: 8888 17 | protocol: TCP 18 | targetPort: http 19 | - name: grpc 20 | port: 8887 21 | protocol: TCP 22 | targetPort: 8887 23 | selector: 24 | app: ds-pipeline-{{.Name}} 25 | component: data-science-pipelines 26 | -------------------------------------------------------------------------------- /config/internal/apiserver/default/service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{.APIServerServiceName}} 5 | namespace: {{.Namespace}} 6 | annotations: 7 | service.beta.openshift.io/serving-cert-secret-name: ds-pipelines-proxy-tls-{{.Name}} 8 | labels: 9 | app: {{.APIServerDefaultResourceName}} 10 | component: data-science-pipelines 11 | spec: 12 | ports: 13 | {{ if .APIServer.EnableRoute }} 14 | - name: oauth 15 | port: 8443 16 | protocol: TCP 17 | targetPort: oauth 18 | {{ end }} 19 | - name: http 20 | port: 8888 21 | protocol: TCP 22 | targetPort: http 23 | - name: 
grpc 24 | port: 8887 25 | protocol: TCP 26 | targetPort: 8887 27 | selector: 28 | app: {{.APIServerDefaultResourceName}} 29 | component: data-science-pipelines 30 | -------------------------------------------------------------------------------- /config/internal/apiserver/route/route.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: Route 2 | apiVersion: route.openshift.io/v1 3 | metadata: 4 | name: {{.APIServerDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | annotations: 10 | kubernetes.io/tls-acme: "true" 11 | spec: 12 | to: 13 | kind: Service 14 | name: {{.APIServerDefaultResourceName}} 15 | weight: 100 16 | port: 17 | targetPort: oauth 18 | tls: 19 | termination: Reencrypt 20 | insecureEdgeTerminationPolicy: Redirect 21 | -------------------------------------------------------------------------------- /config/internal/apiserver/sample-pipeline/sample-config.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: sample-config-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.APIServerDefaultResourceName}} 8 | component: data-science-pipelines 9 | data: 10 | sample_config.json: |- 11 | {{ .SampleConfigJSON }} 12 | -------------------------------------------------------------------------------- /config/internal/common/default/mlmd-envoy-dashboard-access-policy.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: NetworkPolicy 2 | apiVersion: networking.k8s.io/v1 3 | metadata: 4 | name: ds-pipelines-envoy-{{ .Name }} 5 | namespace: {{ .Namespace }} 6 | spec: 7 | podSelector: 8 | matchLabels: 9 | app: ds-pipeline-metadata-envoy-{{ .Name }} 10 | component: data-science-pipelines 11 | ingress: 12 | - ports: 13 | - protocol: TCP 14 | port: 8443 15 | - ports: 
16 | - protocol: TCP 17 | port: 9090 18 | from: 19 | - podSelector: 20 | matchLabels: 21 | component: data-science-pipelines 22 | policyTypes: 23 | - Ingress 24 | -------------------------------------------------------------------------------- /config/internal/common/no-owner/clusterrolebinding.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: ds-pipeline-ui-auth-delegator-{{.Namespace}}-{{.Name}} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | name: system:auth-delegator 9 | subjects: 10 | - kind: ServiceAccount 11 | namespace: {{.Namespace}} 12 | name: ds-pipeline-ui-{{.Name}} 13 | - kind: ServiceAccount 14 | namespace: {{.Namespace}} 15 | name: ds-pipeline-{{.Name}} 16 | - kind: ServiceAccount 17 | namespace: {{.Namespace}} 18 | name: ds-pipeline-metadata-envoy-{{.Name}} 19 | -------------------------------------------------------------------------------- /config/internal/devtools/database.secret.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: "{{.DBConnection.CredentialsSecret.Name}}" 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: mariadb-{{.Name}} 8 | component: data-science-pipelines 9 | data: 10 | password: {{.DBConnection.Password}} 11 | -------------------------------------------------------------------------------- /config/internal/devtools/storage.secret.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: "{{.ObjectStorageConnection.CredentialsSecret.SecretName}}" 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: data-science-pipelines 9 | stringData: 10 | host: "{{.ObjectStorageConnection.Host}}" 11 | port: "{{.ObjectStorageConnection.Port}}" 12 | 
secure: "{{.ObjectStorageConnection.Secure}}" 13 | data: 14 | accesskey: "{{.ObjectStorageConnection.AccessKeyID}}" 15 | secretkey: "{{.ObjectStorageConnection.SecretAccessKey}}" 16 | -------------------------------------------------------------------------------- /config/internal/mariadb/default/mariadb-sa.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: ds-pipelines-mariadb-sa-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: mariadb-{{.Name}} 8 | component: data-science-pipelines 9 | -------------------------------------------------------------------------------- /config/internal/mariadb/default/networkpolicy.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: NetworkPolicy 2 | apiVersion: networking.k8s.io/v1 3 | metadata: 4 | name: mariadb-{{.Name}} 5 | namespace: {{.Namespace}} 6 | spec: 7 | podSelector: 8 | matchLabels: 9 | app: mariadb-{{.Name}} 10 | component: data-science-pipelines 11 | ingress: 12 | - ports: 13 | - protocol: TCP 14 | port: 3306 15 | from: 16 | - podSelector: 17 | matchLabels: 18 | app.kubernetes.io/name: data-science-pipelines-operator 19 | namespaceSelector: 20 | matchLabels: 21 | kubernetes.io/metadata.name: {{.DSPONamespace}} 22 | - podSelector: 23 | matchLabels: 24 | app: {{.APIServerDefaultResourceName}} 25 | component: data-science-pipelines 26 | - podSelector: 27 | matchLabels: 28 | app: ds-pipeline-metadata-grpc-{{.Name}} 29 | component: data-science-pipelines 30 | 31 | policyTypes: 32 | - Ingress 33 | -------------------------------------------------------------------------------- /config/internal/mariadb/default/pvc.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | name: mariadb-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: 
mariadb-{{.Name}} 8 | component: data-science-pipelines 9 | spec: 10 | accessModes: 11 | - ReadWriteOnce 12 | {{- if .MariaDB.StorageClassName }} 13 | storageClassName: {{.MariaDB.StorageClassName}} 14 | {{- end }} 15 | resources: 16 | requests: 17 | storage: {{.MariaDB.PVCSize}} 18 | -------------------------------------------------------------------------------- /config/internal/mariadb/default/service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: mariadb-{{.Name}} 5 | namespace: {{.Namespace}} 6 | {{ if .PodToPodTLS }} 7 | annotations: 8 | service.beta.openshift.io/serving-cert-secret-name: ds-pipelines-mariadb-tls-{{.Name}} 9 | {{ end }} 10 | labels: 11 | app: mariadb-{{.Name}} 12 | component: data-science-pipelines 13 | spec: 14 | ports: 15 | - port: 3306 16 | protocol: TCP 17 | targetPort: 3306 18 | selector: 19 | app: mariadb-{{.Name}} 20 | component: data-science-pipelines 21 | -------------------------------------------------------------------------------- /config/internal/mariadb/default/tls-config.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: ds-pipelines-mariadb-tls-config-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: mariadb-{{.Name}} 8 | component: data-science-pipelines 9 | data: 10 | mariadb-tls-config.cnf: | 11 | [mariadb] 12 | ssl_cert = /.mariadb/certs/tls.crt 13 | ssl_key = /.mariadb/certs/tls.key 14 | -------------------------------------------------------------------------------- /config/internal/mariadb/generated-secret/secret.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: "{{.DBConnection.CredentialsSecret.Name}}" 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: mariadb-{{.Name}} 8 | component: 
data-science-pipelines 9 | data: 10 | {{.DBConnection.CredentialsSecret.Key}}: "{{.DBConnection.Password}}" 11 | -------------------------------------------------------------------------------- /config/internal/minio/default/minio-sa.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: ds-pipelines-minio-sa-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: data-science-pipelines 9 | -------------------------------------------------------------------------------- /config/internal/minio/default/pvc.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | name: minio-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: data-science-pipelines 9 | spec: 10 | accessModes: 11 | - ReadWriteOnce 12 | {{- if .Minio.StorageClassName }} 13 | storageClassName: {{.Minio.StorageClassName}} 14 | {{- end }} 15 | resources: 16 | requests: 17 | storage: {{.Minio.PVCSize}} 18 | -------------------------------------------------------------------------------- /config/internal/minio/default/service.minioservice.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: minio-service 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: data-science-pipelines 9 | spec: 10 | ports: 11 | - name: http 12 | port: 9000 13 | protocol: TCP 14 | targetPort: 9000 15 | selector: 16 | app: minio-{{.Name}} 17 | component: data-science-pipelines 18 | -------------------------------------------------------------------------------- /config/internal/minio/default/service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 
3 | metadata: 4 | name: minio-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: data-science-pipelines 9 | spec: 10 | ports: 11 | - name: http 12 | port: 9000 13 | protocol: TCP 14 | targetPort: 9000 15 | # Work around to enable kfp ui to fetch artifacts for viewer 16 | # S3 generic endpoint for kfp UI only supports rest port 17 | # since default minio is http, and we disable ssl via "AWS_SSL" env var 18 | # https://github.com/opendatahub-io/data-science-pipelines/blob/83d7e719d08c73c2c535722b66b77cdf0cb4cd08/frontend/server/handlers/artifacts.ts#L104 19 | - name: kfp-ui-http 20 | port: 80 21 | protocol: TCP 22 | targetPort: 9000 23 | selector: 24 | app: minio-{{.Name}} 25 | component: data-science-pipelines 26 | -------------------------------------------------------------------------------- /config/internal/minio/generated-secret/secret.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: "{{.ObjectStorageConnection.CredentialsSecret.SecretName}}" 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: data-science-pipelines 9 | stringData: 10 | host: "{{.ObjectStorageConnection.Host}}" 11 | port: "{{.ObjectStorageConnection.Port}}" 12 | secure: "{{.ObjectStorageConnection.Secure}}" 13 | data: 14 | {{.ObjectStorageConnection.CredentialsSecret.AccessKey}}: "{{.ObjectStorageConnection.AccessKeyID}}" 15 | {{.ObjectStorageConnection.CredentialsSecret.SecretKey}}: "{{.ObjectStorageConnection.SecretAccessKey}}" 16 | -------------------------------------------------------------------------------- /config/internal/minio/route.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: Route 2 | apiVersion: route.openshift.io/v1 3 | metadata: 4 | name: minio-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: minio-{{.Name}} 8 | component: 
data-science-pipelines 9 | spec: 10 | to: 11 | kind: Service 12 | name: minio-{{.Name}} 13 | weight: 100 14 | port: 15 | targetPort: 9000 16 | tls: 17 | termination: Edge 18 | insecureEdgeTerminationPolicy: Redirect 19 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/grpc-service/metadata-grpc.ml-pipeline.service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: metadata-grpc-service 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-metadata-grpc-{{.Name}} 8 | component: data-science-pipelines 9 | spec: 10 | ports: 11 | - name: grpc-api 12 | port: {{.MLMD.GRPC.Port}} 13 | protocol: TCP 14 | selector: 15 | app: ds-pipeline-metadata-grpc-{{.Name}} 16 | component: data-science-pipelines 17 | type: ClusterIP 18 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/grpc-service/metadata-grpc.service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: ds-pipeline-metadata-grpc-{{.Name}} 5 | namespace: {{.Namespace}} 6 | {{ if .PodToPodTLS }} 7 | annotations: 8 | service.beta.openshift.io/serving-cert-secret-name: ds-pipeline-metadata-grpc-tls-certs-{{.Name}} 9 | {{ end }} 10 | labels: 11 | app: ds-pipeline-metadata-grpc-{{.Name}} 12 | component: data-science-pipelines 13 | dspa: {{.Name}} 14 | spec: 15 | ports: 16 | - name: grpc-api 17 | port: {{.MLMD.GRPC.Port}} 18 | protocol: TCP 19 | selector: 20 | app: ds-pipeline-metadata-grpc-{{.Name}} 21 | component: data-science-pipelines 22 | type: ClusterIP 23 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/metadata-envoy.service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | 
apiVersion: v1 2 | kind: Service 3 | metadata: 4 | labels: 5 | app: ds-pipeline-metadata-envoy-{{.Name}} 6 | component: data-science-pipelines 7 | name: ds-pipeline-md-{{.Name}} 8 | annotations: 9 | service.beta.openshift.io/serving-cert-secret-name: ds-pipelines-envoy-proxy-tls-{{.Name}} 10 | namespace: {{.Namespace}} 11 | spec: 12 | ports: 13 | - name: md-envoy 14 | port: 9090 15 | protocol: TCP 16 | - name: oauth2-proxy 17 | port: 8443 18 | protocol: TCP 19 | selector: 20 | app: ds-pipeline-metadata-envoy-{{.Name}} 21 | component: data-science-pipelines 22 | type: ClusterIP 23 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/metadata-envoy.serviceaccount.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: ds-pipeline-metadata-envoy-{{.Name}} 5 | namespace: {{.Namespace}} 6 | annotations: 7 | serviceaccounts.openshift.io/oauth-redirectreference.primary: '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"ds-pipeline-md-{{.Name}}"}}' 8 | labels: 9 | app: ds-pipeline-metadata-envoy-{{.Name}} 10 | component: data-science-pipelines 11 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/metadata-grpc-tls-config-secret.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: ds-pipeline-metadata-grpc-tls-config-secret-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | component: metadata-grpc-server 8 | stringData: 9 | config.proto: | 10 | connection_config { 11 | mysql { 12 | host: "{{.DBConnection.Host}}" 13 | port: {{.DBConnection.Port}} 14 | database: "{{.DBConnection.DBName}}" 15 | user: "{{.DBConnection.Username}}" 16 | password: "{{.DBConnection.DecodedPassword}}" 17 | } 18 | } 19 | ssl_config { 20 | 
server_cert: "{{.MlmdGrpcCertificateContents}}" 21 | server_key: "{{.MlmdGrpcPrivateKeyContents}}" 22 | client_verify: false 23 | } 24 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/metadata-grpc.configmap.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: metadata-grpc-configmap 5 | namespace: {{.Namespace}} 6 | labels: 7 | component: metadata-grpc-server 8 | data: 9 | METADATA_GRPC_SERVICE_HOST: "ds-pipeline-metadata-grpc-{{.Name}}.{{.Namespace}}.svc.cluster.local" 10 | METADATA_GRPC_SERVICE_PORT: "8080" 11 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/metadata-grpc.networkpolicy.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: NetworkPolicy 2 | apiVersion: networking.k8s.io/v1 3 | metadata: 4 | name: ds-pipeline-metadata-grpc-{{ .Name }} 5 | namespace: {{ .Namespace }} 6 | spec: 7 | podSelector: 8 | matchLabels: 9 | app: ds-pipeline-metadata-grpc-{{ .Name }} 10 | component: data-science-pipelines 11 | ingress: 12 | - ports: 13 | - protocol: TCP 14 | port: 8080 15 | from: 16 | - podSelector: 17 | matchLabels: 18 | pipelines.kubeflow.org/v2_component: 'true' 19 | - podSelector: 20 | matchLabels: 21 | component: data-science-pipelines 22 | policyTypes: 23 | - Ingress 24 | -------------------------------------------------------------------------------- /config/internal/ml-metadata/metadata-grpc.serviceaccount.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: ds-pipeline-metadata-grpc-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-metadata-grpc-{{.Name}} 8 | component: data-science-pipelines 9 | 
-------------------------------------------------------------------------------- /config/internal/ml-metadata/route/metadata-envoy.route.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: Route 2 | apiVersion: route.openshift.io/v1 3 | metadata: 4 | name: ds-pipeline-md-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-metadata-envoy-{{.Name}} 8 | component: data-science-pipelines 9 | annotations: 10 | kubernetes.io/tls-acme: "true" 11 | spec: 12 | to: 13 | kind: Service 14 | name: ds-pipeline-metadata-envoy-{{.Name}} 15 | weight: 100 16 | port: 17 | targetPort: oauth2-proxy 18 | tls: 19 | termination: Reencrypt 20 | insecureEdgeTerminationPolicy: Redirect 21 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/configmap.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | data: 3 | viewer-pod-template.json: |- 4 | { 5 | "spec": { 6 | "serviceAccountName": "ds-pipelines-viewer-{{.Name}}" 7 | } 8 | } 9 | kind: ConfigMap 10 | metadata: 11 | name: ds-pipeline-ui-configmap-{{.Name}} 12 | namespace: {{.Namespace}} 13 | labels: 14 | app: ds-pipeline-ui-{{.Name}} 15 | component: data-science-pipelines 16 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/role.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: Role 3 | metadata: 4 | name: ds-pipeline-ui-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-ui-{{.Name}} 8 | component: data-science-pipelines 9 | rules: 10 | - apiGroups: 11 | - "" 12 | resources: 13 | - pods 14 | - pods/log 15 | verbs: 16 | - get 17 | - apiGroups: 18 | - "" 19 | resources: 20 | - events 21 | verbs: 22 | - list 23 | - apiGroups: 24 | - "" 25 | resources: 26 | - secrets 27 | 
verbs: 28 | - get 29 | - list 30 | - apiGroups: 31 | - argoproj.io 32 | resources: 33 | - workflows 34 | verbs: 35 | - get 36 | - list 37 | - apiGroups: 38 | - route.openshift.io 39 | verbs: 40 | - get 41 | resources: 42 | - routes 43 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/rolebinding.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: RoleBinding 3 | metadata: 4 | name: ds-pipeline-ui-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-ui-{{.Name}} 8 | component: data-science-pipelines 9 | roleRef: 10 | apiGroup: rbac.authorization.k8s.io 11 | kind: Role 12 | name: ds-pipeline-ui-{{.Name}} 13 | subjects: 14 | - kind: ServiceAccount 15 | name: ds-pipeline-ui-{{.Name}} 16 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/route.yaml.tmpl: -------------------------------------------------------------------------------- 1 | kind: Route 2 | apiVersion: route.openshift.io/v1 3 | metadata: 4 | name: ds-pipeline-ui-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-ui-{{.Name}} 8 | component: data-science-pipelines 9 | annotations: 10 | kubernetes.io/tls-acme: "true" 11 | spec: 12 | to: 13 | kind: Service 14 | name: ds-pipeline-ui-{{.Name}} 15 | weight: 100 16 | port: 17 | targetPort: 8443 18 | tls: 19 | termination: Reencrypt 20 | insecureEdgeTerminationPolicy: Redirect 21 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/sa-ds-pipeline-ui.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: ds-pipeline-ui-{{.Name}} 5 | namespace: {{.Namespace}} 6 | annotations: 7 | serviceaccounts.openshift.io/oauth-redirectreference.primary: 
'{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"ds-pipeline-ui-{{.Name}}"}}' 8 | labels: 9 | app: ds-pipeline-ui-{{.Name}} 10 | component: data-science-pipelines 11 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/sa_ds-pipelines-viewer.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: ds-pipelines-viewer-{{.Name}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: ds-pipeline-ui-{{.Name}} 8 | component: data-science-pipelines 9 | -------------------------------------------------------------------------------- /config/internal/mlpipelines-ui/service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: ds-pipeline-ui-{{.Name}} 5 | namespace: {{.Namespace}} 6 | annotations: 7 | service.beta.openshift.io/serving-cert-secret-name: ds-pipelines-ui-proxy-tls-{{.Name}} 8 | labels: 9 | app: ds-pipeline-ui-{{.Name}} 10 | component: data-science-pipelines 11 | spec: 12 | ports: 13 | - name: http 14 | port: 8443 15 | protocol: TCP 16 | targetPort: 8443 17 | selector: 18 | app: ds-pipeline-ui-{{.Name}} 19 | component: data-science-pipelines 20 | -------------------------------------------------------------------------------- /config/internal/persistence-agent/role.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: Role 3 | metadata: 4 | name: {{.PersistentAgentDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.PersistentAgentDefaultResourceName}} 8 | component: data-science-pipelines 9 | rules: 10 | - apiGroups: 11 | - argoproj.io 12 | resources: 13 | - workflows 14 | verbs: 15 | - get 16 | - list 17 | - watch 18 | - apiGroups: 19 | - kubeflow.org 
20 | resources: 21 | - scheduledworkflows 22 | verbs: 23 | - get 24 | - list 25 | - watch 26 | -------------------------------------------------------------------------------- /config/internal/persistence-agent/rolebinding.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: RoleBinding 3 | metadata: 4 | name: {{.PersistentAgentDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.PersistentAgentDefaultResourceName}} 8 | component: data-science-pipelines 9 | roleRef: 10 | apiGroup: rbac.authorization.k8s.io 11 | kind: Role 12 | name: {{.PersistentAgentDefaultResourceName}} 13 | subjects: 14 | - kind: ServiceAccount 15 | namespace: {{.Namespace}} 16 | name: {{.PersistentAgentDefaultResourceName}} 17 | -------------------------------------------------------------------------------- /config/internal/persistence-agent/sa.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: {{.PersistentAgentDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.PersistentAgentDefaultResourceName}} 8 | component: data-science-pipelines 9 | -------------------------------------------------------------------------------- /config/internal/scheduled-workflow/role.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: Role 3 | metadata: 4 | name: {{.ScheduledWorkflowDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.ScheduledWorkflowDefaultResourceName}} 8 | component: data-science-pipelines 9 | rules: 10 | - apiGroups: 11 | - argoproj.io 12 | resources: 13 | - workflows 14 | verbs: 15 | - create 16 | - get 17 | - list 18 | - watch 19 | - update 20 | - patch 21 | - delete 22 | - apiGroups: 23 | - kubeflow.org 24 | resources: 25 | - 
scheduledworkflows 26 | - scheduledworkflows/finalizers 27 | verbs: 28 | - create 29 | - get 30 | - list 31 | - watch 32 | - update 33 | - patch 34 | - delete 35 | - apiGroups: 36 | - "" 37 | resources: 38 | - events 39 | verbs: 40 | - create 41 | - patch 42 | -------------------------------------------------------------------------------- /config/internal/scheduled-workflow/rolebinding.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: RoleBinding 3 | metadata: 4 | name: {{.ScheduledWorkflowDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.ScheduledWorkflowDefaultResourceName}} 8 | component: data-science-pipelines 9 | roleRef: 10 | apiGroup: rbac.authorization.k8s.io 11 | kind: Role 12 | name: {{.ScheduledWorkflowDefaultResourceName}} 13 | subjects: 14 | - kind: ServiceAccount 15 | name: {{.ScheduledWorkflowDefaultResourceName}} 16 | -------------------------------------------------------------------------------- /config/internal/scheduled-workflow/sa.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | name: {{.ScheduledWorkflowDefaultResourceName}} 5 | namespace: {{.Namespace}} 6 | labels: 7 | app: {{.ScheduledWorkflowDefaultResourceName}} 8 | component: data-science-pipelines 9 | -------------------------------------------------------------------------------- /config/internal/workflow-controller/configmap.yaml.tmpl: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ConfigMap 4 | metadata: 5 | annotations: 6 | internal.kpt.dev/upstream-identifier: '|ConfigMap|default|workflow-controller-configmap' 7 | labels: 8 | app: ds-pipeline-workflow-controller-{{.Name}} 9 | component: data-science-pipelines 10 | dspa: {{.Name}} 11 | name: ds-pipeline-workflow-controller-{{.Name}} 12 | 
namespace: {{.Namespace}} 13 | data: 14 | artifactRepository: | 15 | archiveLogs: false 16 | s3: 17 | endpoint: "{{.ObjectStorageConnection.Endpoint}}" 18 | bucket: "{{.ObjectStorageConnection.Bucket}}" 19 | # keyFormat is a format pattern to define how artifacts will be organized in a bucket. 20 | # It can reference workflow metadata variables such as workflow.namespace, workflow.name, 21 | # pod.name. Can also use strftime formating of workflow.creationTimestamp so that workflow 22 | # artifacts can be organized by date. If omitted, will use `\{\{workflow.name\}\}/\{\{pod.name\}\}`, 23 | # which has potential for have collisions, because names do not guarantee they are unique 24 | # over the lifetime of the cluster. 25 | # Refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/names/. 26 | # 27 | # The following format looks like: 28 | # artifacts/my-workflow-abc123/2018/08/23/my-workflow-abc123-1234567890 29 | # Adding date into the path greatly reduces the chance of \{\{pod.name\}\} collision. 30 | # keyFormat: "artifacts/\{\{workflow.name\}\}/\{\{workflow.creationTimestamp.Y\}\}/\{\{workflow.creationTimestamp.m\}\}/\{\{workflow.creationTimestamp.d\}\}/\{\{pod.name\}\}" # TODO 31 | # insecure will disable TLS. 
Primarily used for minio installs not configured with TLS 32 | insecure: {{.ObjectStorageConnection.Secure}} 33 | accessKeySecret: 34 | name: "{{.ObjectStorageConnection.CredentialsSecret.SecretName}}" 35 | key: "{{.ObjectStorageConnection.CredentialsSecret.AccessKey}}" 36 | secretKeySecret: 37 | name: "{{.ObjectStorageConnection.CredentialsSecret.SecretName}}" 38 | key: "{{.ObjectStorageConnection.CredentialsSecret.SecretKey}}" 39 | -------------------------------------------------------------------------------- /config/internal/workflow-controller/rolebinding.yaml.tmpl: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: RoleBinding 4 | metadata: 5 | annotations: 6 | internal.kpt.dev/upstream-identifier: rbac.authorization.k8s.io|RoleBinding|default|argo-binding 7 | labels: 8 | app: ds-pipeline-workflow-controller-{{.Name}} 9 | component: data-science-pipelines 10 | dspa: {{.Name}} 11 | name: ds-pipeline-workflow-controller-rolebinding-{{.Name}} 12 | namespace: {{.Namespace}} 13 | roleRef: 14 | apiGroup: rbac.authorization.k8s.io 15 | kind: Role 16 | name: ds-pipeline-workflow-controller-role-{{.Name}} 17 | subjects: 18 | - kind: ServiceAccount 19 | name: ds-pipeline-workflow-controller-{{.Name}} 20 | namespace: {{.Namespace}} 21 | -------------------------------------------------------------------------------- /config/internal/workflow-controller/sa.yaml.tmpl: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | annotations: 6 | internal.kpt.dev/upstream-identifier: '|ServiceAccount|default|argo' 7 | labels: 8 | app: ds-pipeline-workflow-controller-{{.Name}} 9 | component: data-science-pipelines 10 | dspa: {{.Name}} 11 | name: ds-pipeline-workflow-controller-{{.Name}} 12 | namespace: {{.Namespace}} 13 | 
-------------------------------------------------------------------------------- /config/internal/workflow-controller/service.yaml.tmpl: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | annotations: 6 | internal.kpt.dev/upstream-identifier: '|Service|default|workflow-controller-metrics' 7 | workflows.argoproj.io/description: | 8 | This service is deprecated. It will be removed in v3.4. 9 | 10 | https://github.com/argoproj/argo-workflows/issues/8441 11 | labels: 12 | app: ds-pipeline-workflow-controller-{{.Name}} 13 | component: data-science-pipelines 14 | dspa: {{.Name}} 15 | name: ds-pipeline-workflow-controller-metrics-{{.Name}} 16 | namespace: {{.Namespace}} 17 | spec: 18 | ports: 19 | - name: metrics 20 | port: 9090 21 | protocol: TCP 22 | targetPort: 9090 23 | selector: 24 | app: ds-pipeline-workflow-controller-{{.Name}} 25 | component: data-science-pipelines 26 | dspa: {{.Name}} 27 | -------------------------------------------------------------------------------- /config/manager/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - manager.yaml 3 | - manager-service.yaml 4 | -------------------------------------------------------------------------------- /config/manager/manager-service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: service 5 | labels: 6 | app.kubernetes.io/name: data-science-pipelines-operator 7 | spec: 8 | ports: 9 | - name: metrics 10 | port: 8080 11 | selector: 12 | app.kubernetes.io/name: data-science-pipelines-operator 13 | -------------------------------------------------------------------------------- /config/manifests/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - 
bases/data-science-pipelines-operator.clusterserviceversion.yaml 3 | - ../default 4 | -------------------------------------------------------------------------------- /config/overlays/kind-tests/img_patch.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: data-science-pipelines-operator-controller-manager 5 | namespace: opendatahub 6 | spec: 7 | template: 8 | spec: 9 | containers: 10 | - image: controller 11 | name: manager 12 | -------------------------------------------------------------------------------- /config/overlays/kind-tests/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | namespace: opendatahub 4 | resources: 5 | - ../../base 6 | patchesStrategicMerge: 7 | - img_patch.yaml 8 | - res_patch.yaml 9 | - user_patch.yaml 10 | images: 11 | - name: controller 12 | newName: quay.io/opendatahub/data-science-pipelines-operator 13 | newTag: main 14 | -------------------------------------------------------------------------------- /config/overlays/kind-tests/res_patch.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: controller-manager 5 | namespace: datasciencepipelinesapplications-controller 6 | spec: 7 | replicas: 1 8 | template: 9 | spec: 10 | containers: 11 | - name: manager 12 | resources: 13 | limits: 14 | cpu: 20m 15 | memory: 200Mi 16 | requests: 17 | cpu: 10m 18 | memory: 64Mi 19 | -------------------------------------------------------------------------------- /config/overlays/kind-tests/user_patch.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: controller-manager 5 | namespace: 
datasciencepipelinesapplications-controller 6 | spec: 7 | replicas: 1 8 | template: 9 | spec: 10 | containers: 11 | - name: manager 12 | securityContext: 13 | runAsUser: 1000 14 | -------------------------------------------------------------------------------- /config/overlays/make-deploy/img_patch.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: data-science-pipelines-operator-controller-manager 5 | namespace: opendatahub 6 | spec: 7 | template: 8 | spec: 9 | containers: 10 | - image: controller 11 | name: manager 12 | -------------------------------------------------------------------------------- /config/overlays/make-deploy/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | namespace: opendatahub 4 | resources: 5 | - ../../base 6 | - ../../argo 7 | patchesStrategicMerge: 8 | - img_patch.yaml 9 | images: 10 | - name: controller 11 | newName: quay.io/opendatahub/data-science-pipelines-operator 12 | newTag: main 13 | -------------------------------------------------------------------------------- /config/overlays/odh/argo/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../../argo 5 | namespace: opendatahub 6 | -------------------------------------------------------------------------------- /config/overlays/odh/dspo/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../../base 5 | namespace: opendatahub 6 | -------------------------------------------------------------------------------- /config/overlays/odh/kustomization.yaml: 
-------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../argo 5 | - ../../base 6 | namespace: opendatahub 7 | -------------------------------------------------------------------------------- /config/overlays/rhoai/argo/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../../argo 5 | namespace: redhat-ods-applications 6 | -------------------------------------------------------------------------------- /config/overlays/rhoai/dspo/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../../base 5 | namespace: redhat-ods-applications 6 | -------------------------------------------------------------------------------- /config/overlays/rhoai/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - ../../base 5 | - ../../argo 6 | namespace: redhat-ods-applications 7 | -------------------------------------------------------------------------------- /config/prometheus/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - monitor.yaml 3 | -------------------------------------------------------------------------------- /config/prometheus/monitor.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: monitoring.coreos.com/v1 2 | kind: ServiceMonitor 3 | metadata: 4 | name: service-monitor 5 | namespace: data-science-pipelines-operator 6 | spec: 7 | endpoints: 8 | - path: /metrics 9 | port: metrics 10 | selector: 11 | 
matchLabels: 12 | app.kubernetes.io/name: data-science-pipelines-operator 13 | -------------------------------------------------------------------------------- /config/rbac/aggregate_dspa_role_edit.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRole 3 | metadata: 4 | labels: 5 | rbac.authorization.k8s.io/aggregate-to-admin: "true" 6 | rbac.authorization.k8s.io/aggregate-to-edit: "true" 7 | name: aggregate-dspa-admin-edit 8 | rules: 9 | - apiGroups: 10 | - datasciencepipelinesapplications.opendatahub.io 11 | resources: 12 | - datasciencepipelinesapplications 13 | verbs: 14 | - get 15 | - list 16 | - watch 17 | - create 18 | - update 19 | - patch 20 | - delete 21 | -------------------------------------------------------------------------------- /config/rbac/aggregate_dspa_role_view.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRole 3 | metadata: 4 | labels: 5 | rbac.authorization.k8s.io/aggregate-to-view: "true" 6 | name: aggregate-dspa-admin-view 7 | rules: 8 | - apiGroups: 9 | - datasciencepipelinesapplications.opendatahub.io 10 | resources: 11 | - datasciencepipelinesapplications 12 | verbs: 13 | - get 14 | - list 15 | - watch 16 | -------------------------------------------------------------------------------- /config/rbac/argo_role_binding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | labels: 5 | app.kubernetes.io/name: data-science-pipelines-operator 6 | name: manager-argo-rolebinding 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: manager-argo-role 11 | subjects: 12 | - kind: ServiceAccount 13 | name: controller-manager 14 | namespace: datasciencepipelinesapplications-controller 15 | 
-------------------------------------------------------------------------------- /config/rbac/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | 4 | resources: 5 | - aggregate_dspa_role_edit.yaml 6 | - aggregate_dspa_role_view.yaml 7 | - leader_election_role_binding.yaml 8 | - leader_election_role.yaml 9 | - role_binding.yaml 10 | - role.yaml 11 | - service_account.yaml 12 | - argo_role.yaml 13 | - argo_role_binding.yaml 14 | -------------------------------------------------------------------------------- /config/rbac/leader_election_role.yaml: -------------------------------------------------------------------------------- 1 | # permissions to do leader election. 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: Role 4 | metadata: 5 | labels: 6 | app.kubernetes.io/name: data-science-pipelines-operator 7 | name: leader-election-role 8 | rules: 9 | - apiGroups: 10 | - "" 11 | resources: 12 | - configmaps 13 | verbs: 14 | - get 15 | - list 16 | - watch 17 | - create 18 | - update 19 | - patch 20 | - delete 21 | - apiGroups: 22 | - coordination.k8s.io 23 | resources: 24 | - leases 25 | verbs: 26 | - get 27 | - list 28 | - watch 29 | - create 30 | - update 31 | - patch 32 | - delete 33 | - apiGroups: 34 | - "" 35 | resources: 36 | - events 37 | verbs: 38 | - create 39 | - patch 40 | -------------------------------------------------------------------------------- /config/rbac/leader_election_role_binding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: RoleBinding 3 | metadata: 4 | labels: 5 | app.kubernetes.io/name: data-science-pipelines-operator 6 | name: leader-election-rolebinding 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: Role 10 | name: leader-election-role 11 | subjects: 12 | - kind: ServiceAccount 13 | name: 
controller-manager 14 | namespace: datasciencepipelinesapplications-controller 15 | -------------------------------------------------------------------------------- /config/rbac/role_binding.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | labels: 5 | app.kubernetes.io/name: data-science-pipelines-operator 6 | name: manager-rolebinding 7 | roleRef: 8 | apiGroup: rbac.authorization.k8s.io 9 | kind: ClusterRole 10 | name: manager-role 11 | subjects: 12 | - kind: ServiceAccount 13 | name: controller-manager 14 | namespace: datasciencepipelinesapplications-controller 15 | -------------------------------------------------------------------------------- /config/rbac/service_account.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ServiceAccount 3 | metadata: 4 | labels: 5 | app.kubernetes.io/name: data-science-pipelines-operator 6 | name: controller-manager 7 | namespace: datasciencepipelinesapplications-controller 8 | -------------------------------------------------------------------------------- /config/samples/custom-configs/db-creds.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: testdbsecret 5 | labels: 6 | app: mariadb-sample 7 | component: data-science-pipelines 8 | stringData: 9 | password: "testingpassword" 10 | type: Opaque 11 | -------------------------------------------------------------------------------- /config/samples/custom-configs/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - dspa.yaml 5 | - db-creds.yaml 6 | - storage-creds.yaml 7 | - ui-configmap.yaml 8 | 
-------------------------------------------------------------------------------- /config/samples/custom-configs/storage-creds.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: teststoragesecret 5 | labels: 6 | opendatahub.io/dashboard: 'true' 7 | opendatahub.io/managed: 'true' 8 | annotations: 9 | opendatahub.io/connection-type: s3 10 | openshift.io/display-name: Minio Data Connection 11 | data: 12 | AWS_ACCESS_KEY_ID: QUtJQUlPU0ZPRE5ON0VYQU1QTEU= 13 | AWS_SECRET_ACCESS_KEY: d0phbHJYVXRuRkVNSS9LN01ERU5HL2JQeFJmaUNZRVhBTVBMRUtFWQ== 14 | # The following keys are needed while https://github.com/kubeflow/pipelines/issues/9689 is open 15 | accesskey: QUtJQUlPU0ZPRE5ON0VYQU1QTEU= 16 | secretkey: d0phbHJYVXRuRkVNSS9LN01ERU5HL2JQeFJmaUNZRVhBTVBMRUtFWQ== 17 | type: Opaque 18 | -------------------------------------------------------------------------------- /config/samples/custom-configs/ui-configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | data: 3 | viewer-pod-template.json: |- 4 | { 5 | "spec": { 6 | "serviceAccountName": "ds-pipelines-viewer-sample" 7 | } 8 | } 9 | kind: ConfigMap 10 | metadata: 11 | name: custom-ui-configmap 12 | -------------------------------------------------------------------------------- /config/samples/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | data: 3 | artifactRepository: | 4 | archiveLogs: false 5 | s3: 6 | endpoint: "http://minio-sample.kubeflow.svc.cluster.local:9000" 7 | bucket: "mlpipeline" 8 | # keyFormat is a format pattern to define how artifacts will be organized in a bucket. 9 | # It can reference workflow metadata variables such as workflow.namespace, workflow.name, 10 | # pod.name. 
Can also use strftime formating of workflow.creationTimestamp so that workflow 11 | # artifacts can be organized by date. If omitted, will use `\{\{workflow.name\}\}/\{\{pod.name\}\}`, 12 | # which has potential for have collisions, because names do not guarantee they are unique 13 | # over the lifetime of the cluster. 14 | # Refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/names/. 15 | # 16 | # The following format looks like: 17 | # artifacts/my-workflow-abc123/2018/08/23/my-workflow-abc123-1234567890 18 | # Adding date into the path greatly reduces the chance of \{\{pod.name\}\} collision. 19 | # keyFormat: "artifacts/\{\{workflow.name\}\}/\{\{workflow.creationTimestamp.Y\}\}/\{\{workflow.creationTimestamp.m\}\}/\{\{workflow.creationTimestamp.d\}\}/\{\{pod.name\}\}" # TODO 20 | # insecure will disable TLS. Primarily used for minio installs not configured with TLS 21 | insecure: false 22 | accessKeySecret: 23 | name: "ds-pipeline-s3-sample" 24 | key: "accesskey" 25 | secretKeySecret: 26 | name: "ds-pipeline-s3-sample" 27 | key: "secretkey" 28 | executor: | 29 | # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting # TODO 30 | kind: ConfigMap 31 | metadata: 32 | name: custom-workflow-controller-configmap 33 | -------------------------------------------------------------------------------- /config/samples/custom-workflow-controller-config/dspa.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: sample 5 | spec: 6 | dspVersion: v2 7 | apiServer: 8 | enableSamplePipeline: true 9 | objectStorage: 10 | minio: 11 | deploy: true 12 | image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 13 | mlpipelineUI: 14 | image: quay.io/opendatahub/ds-pipelines-frontend:latest 15 | workflowController: 16 | 
deploy: true 17 | customConfig: 'custom-workflow-controller-configmap' 18 | -------------------------------------------------------------------------------- /config/samples/custom-workflow-controller-config/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - dspa.yaml 3 | - custom-workflow-controller-configmap.yaml 4 | -------------------------------------------------------------------------------- /config/samples/dspa-simple/dspa_simple.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: sample 5 | spec: 6 | dspVersion: v2 7 | apiServer: 8 | enableSamplePipeline: true 9 | cABundle: 10 | configMapKey: ca.crt 11 | configMapName: kube-root-ca.crt 12 | objectStorage: 13 | # Need to enable this for artifact download links to work 14 | # i.e. for when requesting /apis/v2beta1/artifacts/{id}?share_url=true 15 | enableExternalRoute: true 16 | minio: 17 | deploy: true 18 | image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 19 | mlpipelineUI: 20 | image: quay.io/opendatahub/ds-pipelines-frontend:latest 21 | -------------------------------------------------------------------------------- /config/samples/dspa-simple/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - dspa_simple.yaml 3 | -------------------------------------------------------------------------------- /config/samples/dspa_healthcheck.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: sample 5 | spec: 6 | apiServer: 7 | deploy: true 8 | enableSamplePipeline: false 9 | # If developing against a cluster using 
self-signed certs, then uncomment this field. 10 | # cABundle: 11 | # configMapName: kube-root-ca.crt 12 | # configMapKey: ca.crt 13 | # One of minio or externalStorage must be specified for objectStorage 14 | # This example illustrates minimal deployment with minio 15 | # This is NOT supported and should be used for dev testing/experimentation only. 16 | # See dspa_simple_external_storage.yaml for an example with external connection. 17 | objectStorage: 18 | disableHealthCheck: false 19 | enableExternalRoute: true 20 | minio: 21 | # Image field is required 22 | image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 23 | # Optional 24 | mlpipelineUI: 25 | # Image field is required 26 | image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui' 27 | -------------------------------------------------------------------------------- /config/samples/external-object-storage/dspa.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: sample 5 | spec: 6 | dspVersion: v2 7 | apiServer: 8 | enableSamplePipeline: true 9 | objectStorage: 10 | externalStorage: 11 | bucket: rhods-dsp-dev 12 | host: s3.us-east-2.amazonaws.com 13 | region: us-east-2 14 | s3CredentialsSecret: 15 | accessKey: k8saccesskey 16 | secretKey: k8ssecretkey 17 | secretName: aws-bucket-creds 18 | scheme: https 19 | # Optional 20 | mlpipelineUI: 21 | # Image field is required 22 | image: gcr.io/ml-pipeline/frontend:2.0.2 23 | -------------------------------------------------------------------------------- /config/samples/external-object-storage/kustomization.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kustomize.config.k8s.io/v1beta1 2 | kind: Kustomization 3 | resources: 4 | - dspa.yaml 5 | 
-------------------------------------------------------------------------------- /config/samples/local-dev/dspa.yaml: -------------------------------------------------------------------------------- 1 | # A simple DSPA with the Database and ObjectStore Health Checks Disabled 2 | # 3 | # Since the default database and storage options leverage internal Services, 4 | # a locally-run DSPO that manages an external cluster (common development practice) 5 | # would not be able to run the pre-deploy health checks on these prerequisite components 6 | # and therefore the DSPA will never fully deploy without disabling them, as this DSPA sample does 7 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 8 | kind: DataSciencePipelinesApplication 9 | metadata: 10 | name: sample 11 | spec: 12 | dspVersion: v2 13 | apiServer: 14 | deploy: true 15 | enableSamplePipeline: true 16 | image: gcr.io/ml-pipeline/api-server:2.0.2 17 | persistenceAgent: 18 | image: gcr.io/ml-pipeline/persistenceagent:2.0.2 19 | scheduledWorkflow: 20 | image: gcr.io/ml-pipeline/scheduledworkflow:2.0.2 21 | mlmd: 22 | deploy: true 23 | grpc: 24 | image: gcr.io/tfx-oss-public/ml_metadata_store_server:1.14.0 25 | envoy: 26 | image: gcr.io/ml-pipeline/metadata-envoy:2.0.2 27 | database: 28 | disableHealthCheck: true 29 | objectStorage: 30 | disableHealthCheck: true 31 | minio: 32 | image: quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance 33 | mlpipelineUI: 34 | image: gcr.io/ml-pipeline/frontend:2.0.2 35 | workflowController: 36 | image: gcr.io/ml-pipeline/workflow-controller:v3.3.10-license-compliance 37 | -------------------------------------------------------------------------------- /config/samples/local-dev/kustomization.yaml: -------------------------------------------------------------------------------- 1 | resources: 2 | - dspa.yaml 3 | - storage-creds.yaml 4 | -------------------------------------------------------------------------------- 
/config/samples/local-dev/storage-creds.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: mlpipeline-minio-artifact 5 | labels: 6 | opendatahub.io/dashboard: 'true' 7 | opendatahub.io/managed: 'true' 8 | annotations: 9 | opendatahub.io/connection-type: s3 10 | openshift.io/display-name: Minio Data Connection 11 | data: 12 | accesskey: QUtJQUlPU0ZPRE5ON0VYQU1QTEU= 13 | secretkey: d0phbHJYVXRuRkVNSS9LN01ERU5HL2JQeFJmaUNZRVhBTVBMRUtFWQ== 14 | type: Opaque 15 | -------------------------------------------------------------------------------- /controllers/common.go: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | */ 15 | 16 | package controllers 17 | 18 | import ( 19 | dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" 20 | ) 21 | 22 | var commonTemplatesDir = "common/default" 23 | 24 | const commonCusterRolebindingTemplate = "common/no-owner/clusterrolebinding.yaml.tmpl" 25 | 26 | func (r *DSPAReconciler) ReconcileCommon(dsp *dspav1.DataSciencePipelinesApplication, params *DSPAParams) error { 27 | log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) 28 | 29 | log.Info("Applying Common Resources") 30 | err := r.ApplyDir(dsp, params, commonTemplatesDir) 31 | if err != nil { 32 | return err 33 | } 34 | err = r.ApplyWithoutOwner(params, commonCusterRolebindingTemplate) 35 | if err != nil { 36 | return err 37 | } 38 | 39 | log.Info("Finished applying Common Resources") 40 | return nil 41 | } 42 | 43 | func (r *DSPAReconciler) CleanUpCommon(params *DSPAParams) error { 44 | err := r.DeleteResource(params, commonCusterRolebindingTemplate) 45 | if err != nil { 46 | return err 47 | } 48 | return nil 49 | } 50 | -------------------------------------------------------------------------------- /controllers/config/manifest.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | */ 16 | 17 | package config 18 | 19 | import ( 20 | mfc "github.com/manifestival/controller-runtime-client" 21 | mf "github.com/manifestival/manifestival" 22 | "sigs.k8s.io/controller-runtime/pkg/client" 23 | ) 24 | 25 | func Manifest(cl client.Client, templatePath string, context interface{}) (mf.Manifest, error) { 26 | pathTmplSrc, err := PathTemplateSource(templatePath, context) 27 | if err != nil { 28 | return mf.Manifest{}, err 29 | } 30 | 31 | m, err := mf.ManifestFrom(pathTmplSrc) 32 | if err != nil { 33 | return mf.Manifest{}, err 34 | } 35 | m.Client = mfc.NewClient(cl) 36 | 37 | return m, err 38 | } 39 | -------------------------------------------------------------------------------- /controllers/config/templating.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | package config 18 | 19 | import ( 20 | "bytes" 21 | "io" 22 | "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" 23 | "os" 24 | "text/template" 25 | 26 | mf "github.com/manifestival/manifestival" 27 | ) 28 | 29 | // PathPrefix is the file system path which template paths will be prefixed with. 
30 | // Default is no prefix, which causes paths to be read relative to process working dir 31 | var PathPrefix string 32 | 33 | // PathTemplateSource A templating source read from a file 34 | func PathTemplateSource(path string, context interface{}) (mf.Source, error) { 35 | f, err := os.Open(prefixedPath(path)) 36 | if err != nil { 37 | return mf.Slice([]unstructured.Unstructured{}), err 38 | } 39 | 40 | tmplSrc, err := templateSource(f, context) 41 | if err != nil { 42 | return mf.Slice([]unstructured.Unstructured{}), err 43 | } 44 | 45 | return tmplSrc, nil 46 | } 47 | 48 | func prefixedPath(p string) string { 49 | if PathPrefix != "" { 50 | return PathPrefix + "/" + p 51 | } 52 | return p 53 | } 54 | 55 | // A templating manifest source 56 | func templateSource(r io.Reader, context interface{}) (mf.Source, error) { 57 | b, err := io.ReadAll(r) 58 | if err != nil { 59 | return mf.Slice([]unstructured.Unstructured{}), err 60 | } 61 | t, err := template.New("manifestTemplateDSP").Parse(string(b)) 62 | if err != nil { 63 | return mf.Slice([]unstructured.Unstructured{}), err 64 | } 65 | var b2 bytes.Buffer 66 | err = t.Execute(&b2, context) 67 | if err != nil { 68 | return mf.Slice([]unstructured.Unstructured{}), err 69 | } 70 | return mf.Reader(&b2), nil 71 | } 72 | -------------------------------------------------------------------------------- /controllers/mlpipeline_ui.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | package controllers 18 | 19 | import ( 20 | dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" 21 | ) 22 | 23 | var mlPipelineUITemplatesDir = "mlpipelines-ui" 24 | 25 | func (r *DSPAReconciler) ReconcileUI(dsp *dspav1.DataSciencePipelinesApplication, 26 | params *DSPAParams) error { 27 | 28 | log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) 29 | 30 | if dsp.Spec.MlPipelineUI == nil || !dsp.Spec.MlPipelineUI.Deploy { 31 | log.Info("Skipping Application of MlPipelineUI Resources") 32 | return nil 33 | } 34 | 35 | log.Info("Applying MlPipelineUI Resources") 36 | err := r.ApplyDir(dsp, params, mlPipelineUITemplatesDir) 37 | if err != nil { 38 | return err 39 | } 40 | 41 | log.Info("Finished applying MlPipelineUI Resources") 42 | return nil 43 | } 44 | -------------------------------------------------------------------------------- /controllers/persistence_agent.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | */ 16 | 17 | package controllers 18 | 19 | import ( 20 | dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" 21 | ) 22 | 23 | var persistenceAgentTemplatesDir = "persistence-agent" 24 | 25 | const persistenceAgentDefaultResourceNamePrefix = "ds-pipeline-persistenceagent-" 26 | 27 | func (r *DSPAReconciler) ReconcilePersistenceAgent(dsp *dspav1.DataSciencePipelinesApplication, 28 | params *DSPAParams) error { 29 | 30 | log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) 31 | 32 | if !dsp.Spec.PersistenceAgent.Deploy { 33 | log.Info("Skipping Application of PersistenceAgent Resources") 34 | return nil 35 | } 36 | 37 | log.Info("Applying PersistenceAgent Resources") 38 | 39 | err := r.ApplyDir(dsp, params, persistenceAgentTemplatesDir) 40 | if err != nil { 41 | return err 42 | } 43 | 44 | log.Info("Finished applying PersistenceAgent Resources") 45 | return nil 46 | } 47 | -------------------------------------------------------------------------------- /controllers/scheduled_workflow.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | */ 16 | 17 | package controllers 18 | 19 | import ( 20 | dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" 21 | ) 22 | 23 | var scheduledWorkflowTemplatesDir = "scheduled-workflow" 24 | 25 | const scheduledWorkflowDefaultResourceNamePrefix = "ds-pipeline-scheduledworkflow-" 26 | 27 | func (r *DSPAReconciler) ReconcileScheduledWorkflow(dsp *dspav1.DataSciencePipelinesApplication, 28 | params *DSPAParams) error { 29 | 30 | log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) 31 | 32 | if !dsp.Spec.ScheduledWorkflow.Deploy { 33 | log.Info("Skipping Application of ScheduledWorkflow Resources") 34 | return nil 35 | } 36 | 37 | log.Info("Applying ScheduledWorkflow Resources") 38 | 39 | err := r.ApplyDir(dsp, params, scheduledWorkflowTemplatesDir) 40 | if err != nil { 41 | return err 42 | } 43 | 44 | log.Info("Finished applying ScheduledWorkflow Resources") 45 | return nil 46 | } 47 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_0/config.yaml: -------------------------------------------------------------------------------- 1 | # When a minimal DSPA is deployed 2 | Images: 3 | ApiServer: api-server:test0 4 | PersistenceAgent: persistenceagent:test0 5 | ScheduledWorkflow: scheduledworkflow:test0 6 | MlmdEnvoy: mlmdenvoy:test0 7 | MlmdGRPC: mlmdgrpc:test0 8 | ArgoExecImage: argoexec:test0 9 | ArgoWorkflowController: argowfcontroller:test0 10 | LauncherImage: launcherimage:test0 11 | DriverImage: driverimage:test0 12 | OAuthProxy: oauth-proxy:test0 13 | MariaDB: mariadb:test0 14 | MlPipelineUI: frontend:test0 15 | Minio: minio:test0 16 | RuntimeGeneric: runtimegeneric:test0 17 | Toolbox: toolbox:test0 18 | RHELAI: rhelai:test0 19 | ManagedPipelinesMetadata: 20 | Instructlab: 21 | Name: "[InstructLab] LLM Training Pipeline" 22 | Description: 23 | Filepath: /pipelines/instructlab.yaml 24 | VersionName: "[InstructLab] LLM Training Pipeline" 25 
| Iris: 26 | Name: "[Demo] iris-training" 27 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 28 | Filepath: /samples/iris-pipeline-compiled.yaml 29 | VersionName: "[Demo] iris-training" 30 | DSPO: 31 | PlatformVersion: v0.0.0 32 | ApiServer: 33 | IncludeOwnerReference: false 34 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_0/deploy/cr.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: testdsp0 5 | spec: 6 | podToPodTLS: false 7 | apiServer: 8 | enableSamplePipeline: true 9 | argoLauncherImage: argolauncherimage:test0 10 | argoDriverImage: argodriverimage:test0 11 | objectStorage: 12 | minio: 13 | image: minio:test0 14 | mlpipelineUI: 15 | image: frontend:test0 16 | mlmd: 17 | deploy: true 18 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: ds-pipeline-server-config-testdsp0 5 | namespace: default 6 | labels: 7 | dsp-version: v2 8 | app: ds-pipeline-testdsp0 9 | component: data-science-pipelines 10 | data: 11 | config.json: | 12 | { 13 | "DBConfig": { 14 | "MySQLConfig": { 15 | "ExtraParams": {"tls":"false"}, 16 | "GroupConcatMaxLen": "4194304" 17 | }, 18 | "PostgreSQLConfig": {}, 19 | "ConMaxLifeTime": "120s" 20 | }, 21 | "ObjectStoreConfig": { 22 | "PipelinePath": "pipelines" 23 | }, 24 | "DBDriverName": "mysql", 25 | "ARCHIVE_CONFIG_LOG_FILE_NAME": "main.log", 26 | "ARCHIVE_CONFIG_LOG_PATH_PREFIX": "/artifacts", 27 | 
"InitConnectionTimeout": "6m" 28 | } 29 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ds-pipeline-scheduledworkflow-testdsp0 5 | namespace: default 6 | labels: 7 | dsp-version: v2 8 | app: ds-pipeline-scheduledworkflow-testdsp0 9 | component: data-science-pipelines 10 | dspa: testdsp0 11 | spec: 12 | selector: 13 | matchLabels: 14 | app: ds-pipeline-scheduledworkflow-testdsp0 15 | component: data-science-pipelines 16 | dspa: testdsp0 17 | template: 18 | metadata: 19 | annotations: 20 | cluster-autoscaler.kubernetes.io/safe-to-evict: "true" 21 | labels: 22 | dsp-version: v2 23 | app: ds-pipeline-scheduledworkflow-testdsp0 24 | component: data-science-pipelines 25 | dspa: testdsp0 26 | spec: 27 | containers: 28 | - env: 29 | - name: NAMESPACE 30 | value: "default" 31 | - name: CRON_SCHEDULE_TIMEZONE 32 | value: "UTC" 33 | image: scheduledworkflow:test0 34 | # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting 35 | name: ds-pipeline-scheduledworkflow 36 | command: 37 | - controller 38 | - "--logtostderr=true" 39 | - "--namespace=testdsp0" 40 | livenessProbe: 41 | exec: 42 | command: 43 | - test 44 | - -x 45 | - controller 46 | initialDelaySeconds: 30 47 | periodSeconds: 5 48 | timeoutSeconds: 2 49 | readinessProbe: 50 | exec: 51 | command: 52 | - test 53 | - -x 54 | - controller 55 | initialDelaySeconds: 3 56 | periodSeconds: 5 57 | timeoutSeconds: 2 58 | resources: 59 | requests: 60 | cpu: 120m 61 | memory: 100Mi 62 | limits: 63 | cpu: 250m 64 | memory: 250Mi 65 | serviceAccountName: ds-pipeline-scheduledworkflow-testdsp0 66 | -------------------------------------------------------------------------------- 
/controllers/testdata/declarative/case_1/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: api-server:test1 3 | PersistenceAgent: persistenceagent:test1 4 | ScheduledWorkflow: scheduledworkflow:test1 5 | MlmdEnvoy: mlmdenvoy:test1 6 | MlmdGRPC: mlmdgrpc:test1 7 | ArgoExecImage: argoexec:test1 8 | ArgoWorkflowController: argowfcontroller:test1 9 | LauncherImage: launcherimage:test1 10 | DriverImage: driverimage:test1 11 | OAuthProxy: oauth-proxy:test1 12 | MariaDB: mariadb:test1 13 | MlPipelineUI: frontend:test1 14 | Minio: minio:test1 15 | RuntimeGeneric: runtimegeneric:test1 16 | Toolbox: toolbox:test1 17 | RHELAI: rhelai:test1 18 | ManagedPipelinesMetadata: 19 | Instructlab: 20 | Name: "[InstructLab] LLM Training Pipeline" 21 | Description: 22 | Filepath: /pipelines/instructlab.yaml 23 | VersionName: "[InstructLab] LLM Training Pipeline" 24 | Iris: 25 | Name: "[Demo] iris-training" 26 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 27 | Filepath: /samples/iris-pipeline-compiled.yaml 28 | VersionName: "[Demo] iris-training" 29 | DSPO: 30 | PlatformVersion: v0.0.0 31 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/deploy/cr.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: testdsp1 5 | spec: 6 | podToPodTLS: false 7 | apiServer: 8 | deploy: false 9 | persistenceAgent: 10 | deploy: false 11 | scheduledWorkflow: 12 | deploy: false 13 | mlpipelineUI: 14 | deploy: false 15 | image: frontend:test0 16 | database: 17 | mariaDB: 18 | deploy: false 19 | objectStorage: 20 | minio: 21 | deploy: false 22 | image: minio:test0 23 | 
mlmd: 24 | # curently logic requires mlmd 25 | # probably should make this consistent 26 | # with other components 27 | deploy: true 28 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/expected/not_created/apiserver_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ds-pipeline-testdsp1 5 | namespace: default 6 | spec: 7 | selector: 8 | matchLabels: {} 9 | template: 10 | metadata: 11 | labels: {} 12 | spec: 13 | containers: [] 14 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/expected/not_created/mariadb_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: mariadb-testdsp1 5 | namespace: default 6 | spec: 7 | selector: 8 | matchLabels: {} 9 | template: 10 | metadata: 11 | labels: {} 12 | spec: 13 | containers: [] 14 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/expected/not_created/minio_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: minio-testdsp1 5 | namespace: default 6 | spec: 7 | selector: 8 | matchLabels: {} 9 | template: 10 | metadata: 11 | labels: {} 12 | spec: 13 | containers: [] 14 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/expected/not_created/mlpipelines-ui_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ds-pipeline-ui-testdsp1 5 | namespace: default 6 | spec: 7 | selector: 8 | 
matchLabels: {} 9 | template: 10 | metadata: 11 | labels: {} 12 | spec: 13 | containers: [] 14 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/expected/not_created/persistence-agent_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ds-pipeline-persistenceagent-testdsp1 5 | namespace: default 6 | spec: 7 | selector: 8 | matchLabels: {} 9 | template: 10 | metadata: 11 | labels: {} 12 | spec: 13 | containers: [] 14 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_1/expected/not_created/scheduled-workflow_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ds-pipeline-scheduledworkflow-testdsp1 5 | namespace: default 6 | spec: 7 | selector: 8 | matchLabels: {} 9 | template: 10 | metadata: 11 | labels: {} 12 | spec: 13 | containers: [] 14 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_2/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: api-server:test2 3 | PersistenceAgent: persistenceagent:test2 4 | ScheduledWorkflow: scheduledworkflow:test2 5 | MlmdEnvoy: mlmdenvoy:test2 6 | MlmdGRPC: mlmdgrpc:test2 7 | ArgoExecImage: argoexec:test2 8 | ArgoWorkflowController: argowfcontroller:test2 9 | LauncherImage: launcherimage:test2 10 | DriverImage: driverimage:test2 11 | OAuthProxy: oauth-proxy:test2 12 | MariaDB: mariadb:test2 13 | MlPipelineUI: frontend:test2 14 | Minio: minio:test2 15 | RuntimeGeneric: runtimegeneric:test2 16 | Toolbox: toolbox:test2 17 | RHELAI: rhelai:test2 18 | ManagedPipelinesMetadata: 19 | InstructLab: 20 | Name: InstructLabName 21 | 
Description: InstructLabDescription 22 | Filepath: InstructLabFilepath 23 | VersionName: InstructLabVersionName 24 | VersionDescription: InstructLabVersionDescription 25 | Iris: 26 | Name: "[Demo] iris-training" 27 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 28 | Filepath: /samples/iris-pipeline-compiled.yaml 29 | VersionName: "[Demo] iris-training" 30 | DSPO: 31 | PlatformVersion: v1.2.3 32 | FIPSEnabled: true 33 | ApiServer: 34 | IncludeOwnerReference: false 35 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: sample-config-testdsp2 5 | namespace: default 6 | labels: 7 | dsp-version: v2 8 | app: ds-pipeline-testdsp2 9 | component: data-science-pipelines 10 | data: 11 | sample_config.json: |- 12 | {"loadSamplesOnRestart":true,"pipelines":[{"description":"InstructLabDescription","file":"InstructLabFilepath","name":"InstructLabName","versionDescription":"InstructLabVersionDescription","versionName":"InstructLabVersionName - v1.2.3"},{"description":"[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow","file":"/samples/iris-pipeline-compiled.yaml","name":"[Demo] iris-training","versionDescription":"","versionName":"[Demo] iris-training - v1.2.3"}]} 13 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 
4 | name: ds-pipeline-scheduledworkflow-testdsp2 5 | namespace: default 6 | labels: 7 | dsp-version: v2 8 | app: ds-pipeline-scheduledworkflow-testdsp2 9 | component: data-science-pipelines 10 | dspa: testdsp2 11 | spec: 12 | selector: 13 | matchLabels: 14 | app: ds-pipeline-scheduledworkflow-testdsp2 15 | component: data-science-pipelines 16 | dspa: testdsp2 17 | template: 18 | metadata: 19 | annotations: 20 | cluster-autoscaler.kubernetes.io/safe-to-evict: "true" 21 | labels: 22 | dsp-version: v2 23 | app: ds-pipeline-scheduledworkflow-testdsp2 24 | component: data-science-pipelines 25 | dspa: testdsp2 26 | spec: 27 | containers: 28 | - env: 29 | - name: NAMESPACE 30 | value: "default" 31 | - name: CRON_SCHEDULE_TIMEZONE 32 | value: "EST" 33 | image: scheduledworkflow:test2 34 | # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting 35 | name: ds-pipeline-scheduledworkflow 36 | command: 37 | - controller 38 | - "--logtostderr=true" 39 | - "--namespace=default" 40 | livenessProbe: 41 | exec: 42 | command: 43 | - test 44 | - -x 45 | - controller 46 | initialDelaySeconds: 30 47 | periodSeconds: 5 48 | timeoutSeconds: 2 49 | readinessProbe: 50 | exec: 51 | command: 52 | - test 53 | - -x 54 | - controller 55 | initialDelaySeconds: 3 56 | periodSeconds: 5 57 | timeoutSeconds: 2 58 | resources: 59 | requests: 60 | cpu: 1235m 61 | memory: 1Gi 62 | limits: 63 | cpu: 2526m 64 | memory: 5Gi 65 | serviceAccountName: ds-pipeline-scheduledworkflow-testdsp2 66 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: api-server:test3 3 | PersistenceAgent: persistenceagent:test3 4 | ScheduledWorkflow: scheduledworkflow:test3 5 | MlmdEnvoy: mlmdenvoy:test3 6 | MlmdGRPC: mlmdgrpc:test3 7 | ArgoExecImage: argoexec:test3 8 | 
ArgoWorkflowController: argowfcontroller:test3 9 | LauncherImage: launcherimage:test3 10 | DriverImage: driverimage:test3 11 | OAuthProxy: oauth-proxy:test3 12 | MariaDB: mariadb:test3 13 | MlPipelineUI: frontend:test3 14 | Minio: minio:test3 15 | RuntimeGeneric: runtimegeneric:test3 16 | Toolbox: toolbox:test3 17 | RHELAI: rhelai:test3 18 | ManagedPipelinesMetadata: 19 | Instructlab: 20 | Name: "[InstructLab] LLM Training Pipeline" 21 | Description: 22 | Filepath: /pipelines/instructlab.yaml 23 | VersionName: "[InstructLab] LLM Training Pipeline" 24 | Iris: 25 | Name: "[Demo] iris-training" 26 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 27 | Filepath: /samples/iris-pipeline-compiled.yaml 28 | VersionName: "[Demo] iris-training" 29 | DSPO: 30 | PlatformVersion: v0.0.0 31 | ApiServer: 32 | IncludeOwnerReference: false 33 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/deploy/00_secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: testdbpswsecretname3 5 | stringData: 6 | testpswkey3: testdbsecretpswvalue3 7 | type: Opaque 8 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/deploy/01_secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | name: teststoragesecretname3 5 | stringData: 6 | testaccesskey3: testaccesskeyvalue3 7 | testsecretkey3: testsecretkeyvalue3 8 | type: Opaque 9 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/deploy/02_cr.yaml: 
-------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: testdsp3 5 | spec: 6 | podToPodTLS: false 7 | apiServer: 8 | enableOauth: true 9 | enableSamplePipeline: false 10 | deploy: true 11 | argoLauncherImage: argolauncherimage:test3 12 | argoDriverImage: argodriverimage:test3 13 | persistenceAgent: {} 14 | scheduledWorkflow: {} 15 | mlmd: 16 | deploy: true 17 | database: 18 | externalDB: 19 | host: testdbhost3 20 | passwordSecret: 21 | key: testpswkey3 22 | name: testdbpswsecretname3 23 | pipelineDBName: testdbname3 24 | port: test3 25 | username: testuser3 26 | objectStorage: 27 | externalStorage: 28 | port: '80' 29 | bucket: testbucket3 30 | host: teststoragehost3 31 | s3CredentialsSecret: 32 | accessKey: testaccesskey3 33 | secretKey: testsecretkey3 34 | secretName: teststoragesecretname3 35 | scheme: https 36 | mlpipelineUI: 37 | deploy: false 38 | image: frontend:test3 39 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/expected/not_created/database_secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | # todo: remove todo- from name this should actually be checked for but causes failures because previous tests don't clean up properly 5 | name: todo-ds-pipeline-db-testdsp3 6 | namespace: namespace3 7 | labels: 8 | app: mariadb-extrenal-storage 9 | component: data-science-pipelines 10 | data: 11 | password: dGVzdGRic2VjcmV0cHN3dmFsdWUz 12 | type: Opaque 13 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/expected/not_created/sample-config.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 
3 | metadata: 4 | name: sample-config-testdsp3 5 | namespace: default 6 | labels: 7 | app: ds-pipeline-testdsp3 8 | component: data-science-pipelines 9 | data: {} 10 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/expected/not_created/sample-pipeline.yaml.tmpl: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: sample-pipeline-testdsp3 5 | namespace: default 6 | labels: 7 | app: ds-pipeline-testdsp3 8 | component: data-science-pipelines 9 | data: {} 10 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_3/expected/not_created/storage_secret.yaml: -------------------------------------------------------------------------------- 1 | kind: Secret 2 | apiVersion: v1 3 | metadata: 4 | # todo: remove todo- this should actually be checked for but causes failures because previous tests don't clean up properly 5 | name: todo-ds-pipeline-s3-testdsp3 6 | namespace: namespace3 7 | labels: 8 | app: minio-extrenal-storage 9 | component: data-science-pipelines 10 | data: 11 | accesskey: dGVzdGFjY2Vzc2tleXZhbHVlMw== 12 | host: dGVzdHN0b3JhZ2Vob3N0Mw== 13 | port: ODA= 14 | secretkey: dGVzdHNlY3JldGtleXZhbHVlMw== 15 | secure: dHJ1ZQ== 16 | type: Opaque 17 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_4/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: api-server:test4 3 | PersistenceAgent: persistenceagent:test4 4 | ScheduledWorkflow: scheduledworkflow:test4 5 | MlmdEnvoy: mlmdenvoy:test4 6 | MlmdGRPC: mlmdgrpc:test4 7 | ArgoExecImage: argoexec:test4 8 | ArgoWorkflowController: argowfcontroller:test4 9 | LauncherImage: launcherimage:test4 10 | DriverImage: driverimage:test4 11 | OAuthProxy: 
oauth-proxy:test4 12 | MariaDB: mariadb:test4 13 | MlPipelineUI: frontend:test4 14 | Minio: minio:test4 15 | RuntimeGeneric: runtimegeneric:test4 16 | Toolbox: toolbox:test4 17 | RHELAI: rhelai:test4 18 | ManagedPipelinesMetadata: 19 | Instructlab: 20 | Name: "[InstructLab] LLM Training Pipeline" 21 | Description: 22 | Filepath: /pipelines/instructlab.yaml 23 | VersionName: "[InstructLab] LLM Training Pipeline" 24 | Iris: 25 | Name: "[Demo] iris-training" 26 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 27 | Filepath: /samples/iris-pipeline-compiled.yaml 28 | VersionName: "[Demo] iris-training" 29 | DSPO: 30 | PlatformVersion: v0.0.0 31 | ApiServer: 32 | IncludeOwnerReference: false 33 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ds-pipeline-scheduledworkflow-testdsp4 5 | namespace: default 6 | labels: 7 | dsp-version: v2 8 | app: ds-pipeline-scheduledworkflow-testdsp4 9 | component: data-science-pipelines 10 | dspa: testdsp4 11 | spec: 12 | selector: 13 | matchLabels: 14 | app: ds-pipeline-scheduledworkflow-testdsp4 15 | component: data-science-pipelines 16 | dspa: testdsp4 17 | template: 18 | metadata: 19 | annotations: 20 | cluster-autoscaler.kubernetes.io/safe-to-evict: "true" 21 | labels: 22 | dsp-version: v2 23 | app: ds-pipeline-scheduledworkflow-testdsp4 24 | component: data-science-pipelines 25 | dspa: testdsp4 26 | spec: 27 | containers: 28 | - env: 29 | - name: NAMESPACE 30 | value: "default" 31 | - name: CRON_SCHEDULE_TIMEZONE 32 | value: "EST" 33 | image: this-scheduledworkflow-image-from-cr-should-be-used:test4 34 | # imagePullPolicy: 
default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting 35 | name: ds-pipeline-scheduledworkflow 36 | command: 37 | - controller 38 | - "--logtostderr=true" 39 | - "--namespace=default" 40 | livenessProbe: 41 | exec: 42 | command: 43 | - test 44 | - -x 45 | - controller 46 | initialDelaySeconds: 30 47 | periodSeconds: 5 48 | timeoutSeconds: 2 49 | readinessProbe: 50 | exec: 51 | command: 52 | - test 53 | - -x 54 | - controller 55 | initialDelaySeconds: 3 56 | periodSeconds: 5 57 | timeoutSeconds: 2 58 | resources: 59 | requests: 60 | cpu: 1235m 61 | memory: 1Gi 62 | limits: 63 | cpu: 2526m 64 | memory: 5Gi 65 | serviceAccountName: ds-pipeline-scheduledworkflow-testdsp4 66 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_5/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: api-server:test5 3 | PersistenceAgent: persistenceagent:test5 4 | ScheduledWorkflow: scheduledworkflow:test5 5 | MlmdEnvoy: mlmdenvoy:test5 6 | MlmdGRPC: mlmdgrpc:test5 7 | ArgoExecImage: argoexec:test5 8 | ArgoWorkflowController: argowfcontroller:test5 9 | LauncherImage: launcherimage:test5 10 | DriverImage: driverimage:test5 11 | OAuthProxy: oauth-proxy:test5 12 | MariaDB: mariadb:test5 13 | MlPipelineUI: frontend:test5 14 | Minio: minio:test5 15 | RuntimeGeneric: runtimegeneric:test5 16 | Toolbox: toolbox:test5 17 | RHELAI: rhelai:test5 18 | ManagedPipelinesMetadata: 19 | Instructlab: 20 | Name: "[InstructLab] LLM Training Pipeline" 21 | Description: 22 | Filepath: /pipelines/instructlab.yaml 23 | VersionName: "[InstructLab] LLM Training Pipeline" 24 | Iris: 25 | Name: "[Demo] iris-training" 26 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 27 | Filepath: 
/samples/iris-pipeline-compiled.yaml 28 | VersionName: "[Demo] iris-training" 29 | DSPO: 30 | PlatformVersion: v0.0.0 31 | ApiServer: 32 | IncludeOwnerReference: false 33 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_5/deploy/00_configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: testcabundleconfigmap5 5 | data: 6 | testcabundleconfigmapkey5.crt: | 7 | -----BEGIN CERTIFICATE----- 8 | MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL 9 | BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0 10 | MDMwNTAxMTExN1oXDTM0MDMwMzAxMTExN1owJjELMAkGA1UEBhMCWFgxFzAVBgNV 11 | BAMMDnJoLWRzcC1kZXZzLmlvMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC 12 | AgEAnCxNdQ0EUhswfu8/K6icQKc//2xpTvcp9Bn9QZ9UUy3f2UXv5hvd4W2PM/uX 13 | FaZGoEzQsYagbjyuHDBxek8YOZvdRx9h7O+LLfN+DXeLbaY6tZ2AxNWwcaAmG0EH 14 | nSDVORrk8/aZfFRoxgQigWyuK28YZn2SopjNyvOc8GkNjCFO4y7g4QuzWdGMgMIA 15 | +whtt3EuYIwaRourKNFp4oR4InOVdPfuGezxbKRPcFfey1JEdTxGoWnHC+HDDMCf 16 | R2vV8hAQB4fdvbOoz3+S7j7d8YiaFBK/P2us6Il5tsUw4kzhD2/OLzyERB7SloZk 17 | NiIcSsU0USRGLb4/ybQsxu9UPIXUlKTK70HxIEIdPSPPMM84khIOuax0QXKORFHT 18 | Ti9jgEfXjuX/2RPijQoCMDrqRQvDxExnTVMncqud6PeDxOWfvSG4oyZBr4HgNAap 19 | wX7FWEY6SOH0e3GrH9ceI3afDO4A4YR+EE426GgHgYe8g4NTfD1D79+txmSY6VvV 20 | MBwEvPo1LJVmvz23HBC60+e6Ld3WjwE+viOktt20R5Td3NPj7qcBlMDs105yiz+l 21 | Ex1h/WDrAssETrelppg3Xgkkz+iY5RwiUB2BTzeiiDbN+AE6X+S5c61Izc2qAeH2 22 | gVrvMDlAK6t6bQ696TzItdAs5SnXauxPjfwmK+F65SYy7z8CAwEAAaNTMFEwHQYD 23 | VR0OBBYEFDj7l4fu0pXChZsXU5Cgsmr5TYq7MB8GA1UdIwQYMBaAFDj7l4fu0pXC 24 | hZsXU5Cgsmr5TYq7MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIB 25 | AGr5DblOsH7JE9JM3M4p4eiXD40B/VIACEDMYJvyr6QjmcT8+XnHkiu7OV3OJV/G 26 | S4NKhleBhfpaaP2ZPGO/vUTmqXwcK78jl0WEjPrMVjs1eDoSnUNi+KwFTBypIusD 27 | gSEnICXa26v1CHCQG0QB+rUrIxJqjtq+bnlw/Ns1wxTYfZBFW1ykCJuMsekPo0pN 28 | 
yTH1eWr0eSVWgljqHKaUjKbRRTSTWvk2Sewaq004W+6QOSb3nb1+GHVMov/Q6vsz 29 | j6/3B7+7wybR80UTBI/1DfTlefQaOOgEPBjQZ92NXSxMKe2J7FPD+7NHvwTNzzVD 30 | jg3cmW8pbtLEyxa+C+6EN8xnmklVfyzuzVsRJvrZvzYcOgLK2ji35oq9FYGXm0yH 31 | HRpQPBFkcgNedD3qrJNYKkIBiAh2SSKKA+J8eP3uD9NUOScgl2aKVz/phU5rSDwt 32 | NlhRuX8sS7q4gpL9qk4jWrMb8tNeN5nYRvmJj+Slf9sQSTfvukKo+2X8GpAecQNC 33 | z6OeQyN+3C2zm4cLCHHWC0ZR/iHQyHIVKlFXznWe6qA64o4x1A0GurjVMAw0Pe0v 34 | WBV3KJBsYK/wijtLeip1oKobU76oE0ML/bnhV10k6usvl4n8cDmcONo5FnGoT8Pk 35 | 80htx6w5fanMFu4MnoBeyJhhzNfg7ywJcc2VZSM27s2B 36 | -----END CERTIFICATE----- 37 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_5/deploy/01_configmap.yaml: -------------------------------------------------------------------------------- 1 | kind: ConfigMap 2 | apiVersion: v1 3 | metadata: 4 | name: openshift-service-ca.crt 5 | data: 6 | service-ca.crt: | 7 | -----BEGIN CERTIFICATE----- 8 | MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL 9 | BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0 10 | MDMwNTAxMTExN1oXDTM0MDMwMzAxMTExN1owJjELMAkGA1UEBhMCWFgxFzAVBgNV 11 | BAMMDnJoLWRzcC1kZXZzLmlvMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC 12 | AgEAnCxNdQ0EUhswfu8/K6icQKc//2xpTvcp9Bn9QZ9UUy3f2UXv5hvd4W2PM/uX 13 | FaZGoEzQsYagbjyuHDBxek8YOZvdRx9h7O+LLfN+DXeLbaY6tZ2AxNWwcaAmG0EH 14 | nSDVORrk8/aZfFRoxgQigWyuK28YZn2SopjNyvOc8GkNjCFO4y7g4QuzWdGMgMIA 15 | +whtt3EuYIwaRourKNFp4oR4InOVdPfuGezxbKRPcFfey1JEdTxGoWnHC+HDDMCf 16 | R2vV8hAQB4fdvbOoz3+S7j7d8YiaFBK/P2us6Il5tsUw4kzhD2/OLzyERB7SloZk 17 | NiIcSsU0USRGLb4/ybQsxu9UPIXUlKTK70HxIEIdPSPPMM84khIOuax0QXKORFHT 18 | Ti9jgEfXjuX/2RPijQoCMDrqRQvDxExnTVMncqud6PeDxOWfvSG4oyZBr4HgNAap 19 | wX7FWEY6SOH0e3GrH9ceI3afDO4A4YR+EE426GgHgYe8g4NTfD1D79+txmSY6VvV 20 | MBwEvPo1LJVmvz23HBC60+e6Ld3WjwE+viOktt20R5Td3NPj7qcBlMDs105yiz+l 21 | Ex1h/WDrAssETrelppg3Xgkkz+iY5RwiUB2BTzeiiDbN+AE6X+S5c61Izc2qAeH2 22 | gVrvMDlAK6t6bQ696TzItdAs5SnXauxPjfwmK+F65SYy7z8CAwEAAaNTMFEwHQYD 
23 | VR0OBBYEFDj7l4fu0pXChZsXU5Cgsmr5TYq7MB8GA1UdIwQYMBaAFDj7l4fu0pXC 24 | hZsXU5Cgsmr5TYq7MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIB 25 | AGr5DblOsH7JE9JM3M4p4eiXD40B/VIACEDMYJvyr6QjmcT8+XnHkiu7OV3OJV/G 26 | S4NKhleBhfpaaP2ZPGO/vUTmqXwcK78jl0WEjPrMVjs1eDoSnUNi+KwFTBypIusD 27 | gSEnICXa26v1CHCQG0QB+rUrIxJqjtq+bnlw/Ns1wxTYfZBFW1ykCJuMsekPo0pN 28 | yTH1eWr0eSVWgljqHKaUjKbRRTSTWvk2Sewaq004W+6QOSb3nb1+GHVMov/Q6vsz 29 | j6/3B7+7wybR80UTBI/1DfTlefQaOOgEPBjQZ92NXSxMKe2J7FPD+7NHvwTNzzVD 30 | jg3cmW8pbtLEyxa+C+6EN8xnmklVfyzuzVsRJvrZvzYcOgLK2ji35oq9FYGXm0yH 31 | HRpQPBFkcgNedD3qrJNYKkIBiAh2SSKKA+J8eP3uD9NUOScgl2aKVz/phU5rSDwt 32 | NlhRuX8sS7q4gpL9qk4jWrMb8tNeN5nYRvmJj+Slf9sQSTfvukKo+2X8GpAecQNC 33 | z6OeQyN+3C2zm4cLCHHWC0ZR/iHQyHIVKlFXznWe6qA64o4x1A0GurjVMAw0Pe0v 34 | WBV3KJBsYK/wijtLeip1oKobU76oE0ML/bnhV10k6usvl4n8cDmcONo5FnGoT8Pk 35 | 80htx6w5fanMFu4MnoBeyJhhzNfg7ywJcc2VZSM27s2B 36 | -----END CERTIFICATE----- 37 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_5/deploy/02_cr.yaml: -------------------------------------------------------------------------------- 1 | # Test: 2 | # DSPA CA bundle, ensure user provided CA Bundle results in dsp-trusted-ca config map creation and utilization in artifact config. 
3 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 4 | kind: DataSciencePipelinesApplication 5 | metadata: 6 | name: testdsp5 7 | spec: 8 | podToPodTLS: true 9 | dspVersion: v2 10 | objectStorage: 11 | minio: 12 | image: minio:test5 13 | database: 14 | mariaDB: 15 | deploy: true 16 | mlmd: 17 | deploy: true 18 | apiServer: 19 | deploy: true 20 | enableSamplePipeline: false 21 | caBundleFileName: testcabundleconfigmapkey5.crt 22 | cABundle: 23 | configMapName: testcabundleconfigmap5 24 | configMapKey: testcabundleconfigmapkey5.crt 25 | -------------------------------------------------------------------------------- /controllers/testdata/declarative/case_6/config.yaml: -------------------------------------------------------------------------------- 1 | Images: 2 | ApiServer: api-server:test6 3 | PersistenceAgent: persistenceagent:test6 4 | ScheduledWorkflow: scheduledworkflow:test6 5 | MlmdEnvoy: mlmdenvoy:test6 6 | MlmdGRPC: mlmdgrpc:test6 7 | ArgoExecImage: argoexec:test6 8 | ArgoWorkflowController: argowfcontroller:test6 9 | LauncherImage: launcherimage:test6 10 | DriverImage: driverimage:test6 11 | OAuthProxy: oauth-proxy:test6 12 | MariaDB: mariadb:test6 13 | MlPipelineUI: frontend:test6 14 | Minio: minio:test6 15 | RuntimeGeneric: runtimegeneric:test6 16 | Toolbox: toolbox:test6 17 | RHELAI: rhelai:test6 18 | ManagedPipelinesMetadata: 19 | Instructlab: 20 | Name: "[InstructLab] LLM Training Pipeline" 21 | Description: 22 | Filepath: /pipelines/instructlab.yaml 23 | VersionName: "[InstructLab] LLM Training Pipeline" 24 | Iris: 25 | Name: "[Demo] iris-training" 26 | Description: "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow" 27 | Filepath: /samples/iris-pipeline-compiled.yaml 28 | VersionName: "[Demo] iris-training" 29 | DSPO: 30 | PlatformVersion: v0.0.0 31 | ApiServer: 32 | IncludeOwnerReference: false 33 | 
-------------------------------------------------------------------------------- /controllers/testdata/declarative/case_6/deploy/00_cr.yaml: -------------------------------------------------------------------------------- 1 | # Test: 2 | # podToPodTLS = false, should disable any tls configs for apiserver, pa, and kfp ui 3 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 4 | kind: DataSciencePipelinesApplication 5 | metadata: 6 | name: testdsp6 7 | spec: 8 | podToPodTLS: false 9 | objectStorage: 10 | minio: 11 | image: minio:test6 12 | database: 13 | mariaDB: 14 | deploy: true 15 | mlpipelineUI: 16 | deploy: true 17 | image: frontend:test6 18 | mlmd: 19 | deploy: true 20 | apiServer: 21 | deploy: true 22 | enableOauth: true 23 | enableSamplePipeline: false 24 | -------------------------------------------------------------------------------- /controllers/testdata/tls/ca-bundle.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL 3 | BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0 4 | MDMwNTAxMTExN1oXDTM0MDMwMzAxMTExN1owJjELMAkGA1UEBhMCWFgxFzAVBgNV 5 | BAMMDnJoLWRzcC1kZXZzLmlvMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC 6 | AgEAnCxNdQ0EUhswfu8/K6icQKc//2xpTvcp9Bn9QZ9UUy3f2UXv5hvd4W2PM/uX 7 | FaZGoEzQsYagbjyuHDBxek8YOZvdRx9h7O+LLfN+DXeLbaY6tZ2AxNWwcaAmG0EH 8 | nSDVORrk8/aZfFRoxgQigWyuK28YZn2SopjNyvOc8GkNjCFO4y7g4QuzWdGMgMIA 9 | +whtt3EuYIwaRourKNFp4oR4InOVdPfuGezxbKRPcFfey1JEdTxGoWnHC+HDDMCf 10 | R2vV8hAQB4fdvbOoz3+S7j7d8YiaFBK/P2us6Il5tsUw4kzhD2/OLzyERB7SloZk 11 | NiIcSsU0USRGLb4/ybQsxu9UPIXUlKTK70HxIEIdPSPPMM84khIOuax0QXKORFHT 12 | Ti9jgEfXjuX/2RPijQoCMDrqRQvDxExnTVMncqud6PeDxOWfvSG4oyZBr4HgNAap 13 | wX7FWEY6SOH0e3GrH9ceI3afDO4A4YR+EE426GgHgYe8g4NTfD1D79+txmSY6VvV 14 | MBwEvPo1LJVmvz23HBC60+e6Ld3WjwE+viOktt20R5Td3NPj7qcBlMDs105yiz+l 15 | Ex1h/WDrAssETrelppg3Xgkkz+iY5RwiUB2BTzeiiDbN+AE6X+S5c61Izc2qAeH2 16 | 
gVrvMDlAK6t6bQ696TzItdAs5SnXauxPjfwmK+F65SYy7z8CAwEAAaNTMFEwHQYD 17 | VR0OBBYEFDj7l4fu0pXChZsXU5Cgsmr5TYq7MB8GA1UdIwQYMBaAFDj7l4fu0pXC 18 | hZsXU5Cgsmr5TYq7MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIB 19 | AGr5DblOsH7JE9JM3M4p4eiXD40B/VIACEDMYJvyr6QjmcT8+XnHkiu7OV3OJV/G 20 | S4NKhleBhfpaaP2ZPGO/vUTmqXwcK78jl0WEjPrMVjs1eDoSnUNi+KwFTBypIusD 21 | gSEnICXa26v1CHCQG0QB+rUrIxJqjtq+bnlw/Ns1wxTYfZBFW1ykCJuMsekPo0pN 22 | yTH1eWr0eSVWgljqHKaUjKbRRTSTWvk2Sewaq004W+6QOSb3nb1+GHVMov/Q6vsz 23 | j6/3B7+7wybR80UTBI/1DfTlefQaOOgEPBjQZ92NXSxMKe2J7FPD+7NHvwTNzzVD 24 | jg3cmW8pbtLEyxa+C+6EN8xnmklVfyzuzVsRJvrZvzYcOgLK2ji35oq9FYGXm0yH 25 | HRpQPBFkcgNedD3qrJNYKkIBiAh2SSKKA+J8eP3uD9NUOScgl2aKVz/phU5rSDwt 26 | NlhRuX8sS7q4gpL9qk4jWrMb8tNeN5nYRvmJj+Slf9sQSTfvukKo+2X8GpAecQNC 27 | z6OeQyN+3C2zm4cLCHHWC0ZR/iHQyHIVKlFXznWe6qA64o4x1A0GurjVMAw0Pe0v 28 | WBV3KJBsYK/wijtLeip1oKobU76oE0ML/bnhV10k6usvl4n8cDmcONo5FnGoT8Pk 29 | 80htx6w5fanMFu4MnoBeyJhhzNfg7ywJcc2VZSM27s2B 30 | -----END CERTIFICATE----- -------------------------------------------------------------------------------- /controllers/testdata/tls/dummy-ca-bundle.crt: -------------------------------------------------------------------------------- 1 | dummycontent -------------------------------------------------------------------------------- /controllers/testdata/tls/empty-ca-bundle.crt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/controllers/testdata/tls/empty-ca-bundle.crt -------------------------------------------------------------------------------- /controllers/workflow_controller.go: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | */ 16 | 17 | package controllers 18 | 19 | import ( 20 | dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" 21 | ) 22 | 23 | var workflowControllerTemplatesDir = "workflow-controller" 24 | 25 | func (r *DSPAReconciler) ReconcileWorkflowController(dsp *dspav1.DataSciencePipelinesApplication, 26 | params *DSPAParams) error { 27 | 28 | log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) 29 | 30 | if dsp.Spec.WorkflowController == nil || !dsp.Spec.WorkflowController.Deploy { 31 | log.Info("Skipping Application of WorkflowController Resources") 32 | return nil 33 | } 34 | 35 | log.Info("Applying WorkflowController Resources") 36 | 37 | err := r.ApplyDir(dsp, params, workflowControllerTemplatesDir) 38 | if err != nil { 39 | return err 40 | } 41 | 42 | log.Info("Finished applying WorkflowController Resources") 43 | return nil 44 | } 45 | -------------------------------------------------------------------------------- /datasciencecluster/README.md: -------------------------------------------------------------------------------- 1 | # Deploy latest DSPO via ODH 2 | 3 | To deploy the latest DSPO using the changes within this repo via Open Data Hub you can follow these steps 4 | 5 | 6 | ## Pre-requisites 7 | 1. An OpenShift cluster that is 4.10 or higher. 8 | 2. You will need to be logged into this cluster as [cluster admin] via [oc client]. 9 | 3. The OpenShift Cluster must have OpenShift Pipelines 1.9 or higher installed. Instructions [here][OCP Pipelines Operator]. 10 | 4. 
The Open Data Hub operator needs to be installed. You can install it via [OperatorHub][installodh]. 11 | 12 | 13 | ## Deploy DataScienceCluster 14 | 15 | Clone this repository then run the following commands: 16 | 17 | ```bash 18 | # If this namespace does not exist 19 | oc new-project opendatahub 20 | 21 | # Then run 22 | oc apply -f data-science-pipelines-operator/datasciencecluster/datasciencecluster.yaml -n opendatahub 23 | ``` 24 | 25 | Once done, follow the steps outlined [here][dspa] to get started with deploying your own 26 | `DataSciencePipelinesApplication` with the latest changes found within this repository. 27 | 28 | [cluster admin]: https://docs.openshift.com/container-platform/4.12/authentication/using-rbac.html#creating-cluster-admin_using-rbac 29 | [oc client]: https://mirror.openshift.com/pub/openshift-v4/x86_64/clients/ocp/latest/openshift-client-linux.tar.gz 30 | [OCP Pipelines Operator]: https://docs.openshift.com/container-platform/4.12/cicd/pipelines/installing-pipelines.html#op-installing-pipelines-operator-in-web-console_installing-pipelines 31 | [installodh]: https://opendatahub.io/docs/getting-started/quick-installation.html 32 | [dspa]: https://github.com/opendatahub-io/data-science-pipelines-operator#deploy-dsp-instance 33 | -------------------------------------------------------------------------------- /datasciencecluster/datasciencecluster.yaml: -------------------------------------------------------------------------------- 1 | kind: DataScienceCluster 2 | apiVersion: datasciencecluster.opendatahub.io/v1 3 | metadata: 4 | name: data-science-pipelines-operator 5 | spec: 6 | components: 7 | datasciencepipelines: 8 | managementState: Managed 9 | devFlags: 10 | manifests: 11 | - uri: https://github.com/opendatahub-io/data-science-pipelines-operator/tarball/main 12 | contextDir: config 13 | sourcePath: overlays/odh 14 | -------------------------------------------------------------------------------- 
/docs/example_pipelines/iris/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/python:3.9.17 2 | 3 | WORKDIR / 4 | RUN mkdir .cache .local minio /tmp/kfp s3 5 | RUN chgrp -R 0 /.cache && \ 6 | chmod -R g=u /.cache && \ 7 | chgrp -R 0 /tmp/kfp && \ 8 | chmod -R g=u /tmp/kfp && \ 9 | chgrp -R 0 /.local && \ 10 | chmod -R g=u /.local && \ 11 | chgrp -R 0 /minio && \ 12 | chmod -R g=u /minio && \ 13 | chgrp -R 0 /s3 && \ 14 | chmod -R g=u /s3 15 | -------------------------------------------------------------------------------- /docs/images/create_run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/create_run.png -------------------------------------------------------------------------------- /docs/images/executed_run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/executed_run.png -------------------------------------------------------------------------------- /docs/images/logs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/logs.png -------------------------------------------------------------------------------- /docs/images/start_run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/start_run.png -------------------------------------------------------------------------------- /docs/images/started_run.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/started_run.png -------------------------------------------------------------------------------- /docs/images/upload_flipcoin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/upload_flipcoin.png -------------------------------------------------------------------------------- /docs/images/upload_pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendatahub-io/data-science-pipelines-operator/704410ad06578bbad915cdf57837e089f8b066fa/docs/images/upload_pipeline.png -------------------------------------------------------------------------------- /hack/boilerplate.go.txt: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2023. 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 
15 | */ 16 | -------------------------------------------------------------------------------- /scripts/release/README.md: -------------------------------------------------------------------------------- 1 | ## DSP Release tools 2 | 3 | The scripts found in this folder contain tools utilized for performing a DSP release. 4 | 5 | ### Params Generation 6 | This tool will generate a new `params.env` file based on the upcoming DSP tags. 7 | 8 | If images in Red Hat registry have also been updated (e.g. security fixes) without changes to tag version, then the newer 9 | digests will be used. The following command will generate the `params.env`: 10 | 11 | **Pre-condition**: All DSP/DSPO images should have been build with tag 12 | ``` 13 | python release.py params --tag v1.2.0 --out_file params.env \ 14 | --override="IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33" 15 | ``` 16 | 17 | See `--help` for more options like specifying tags for images not tied to DSP (ubi, mariadb, oauth proxy, etc.) 18 | 19 | ### Compatibility Doc generation 20 | Before each release, ensure that the [compatibility doc] is upto date. This doc is auto generated, the version compatibility 21 | is pulled from the [compatibility yaml]. The yaml should be kept upto date by developers (manual). 22 | 23 | To generate the version doc run the following: 24 | 25 | **Pre-condition**: ensure that [compatibility yaml] has an entry for the latest DSP version to be released, with version 26 | compatibility up to date. 
def table(rows):
    """
    Convert a list of dicts into a markdown table.

    The column set is the union of the keys of all rows, preserved in
    first-seen order. A row that lacks one of those columns renders as an
    empty cell, so key sets no longer have to be identical across rows.

    :param rows: list of dicts, one dict per table row
    :return: markdown table text where each row corresponds to a dict in
        rows; empty string when rows is empty
    """
    if not rows:
        return ""

    # Union of keys across all rows, preserving first-seen order.
    cols = []
    for row in rows:
        cols.extend(key for key in row.keys() if key not in cols)

    lines = ['| ' + ' | '.join(cols) + ' |',
             '|-----' * len(cols) + '|']
    for row in rows:
        # .get() tolerates rows whose key set is a subset of the columns;
        # the original row[col] raised KeyError on heterogeneous rows even
        # though the column collection above anticipated them.
        lines.append('| ' + ' | '.join(str(row.get(col, '')) for col in cols) + ' |')
    return '\n'.join(lines) + '\n'


def version_doc(args):
    """
    Render the DSP version-compatibility markdown document.

    Reads a YAML list of per-version dicts from args.input_file, renders it
    as a markdown table, substitutes it into template/version_doc.md at the
    '<>' placeholder, and writes the result to args.out_file.

    :param args: argparse-style namespace with input_file and out_file
        attributes
    """
    input_file = args.input_file
    out_file = args.out_file
    with open(input_file, 'r') as f:
        rows = yaml.safe_load(f)

    # The template lives next to this script, so resolve it relative to this
    # file rather than the current working directory.
    dirname = os.path.dirname(__file__)
    template_file = os.path.join(dirname, 'template/version_doc.md')
    with open(template_file, 'r') as vd:
        final_md = vd.read()

    table_md = table(rows)

    final_md = final_md.replace('<>', table_md)
    final_md = '\n' + final_md

    with open(out_file, 'w') as f:
        f.write(final_md)
7 | 8 | ### Pre-requisites 9 | 10 | * Logged into an OCP/K8s cluster 11 | * Valid kubeconfig file (default is `$HOME/.kube/config`) 12 | * DSPO is already installed (either via ODH or manual) 13 | * An empty namespace to run the tests in 14 | 15 | 16 | ### Run tests locally 17 | 18 | 19 | #### Full test suite 20 | 21 | The full test suite will install a DSPA, wait for it to reach ready status, run the test suite, then clean up the DSPA 22 | afterwards. 23 | 24 | ```bash 25 | 26 | # Adjust the following as needed 27 | KUBECONFIG_PATH=$HOME/.kube/config # this is usually the default 28 | TARGET_CLUSTER=...(e.g. https://api.hukhan.dev.datahub.redhat.com:6443, you can retrieve this via `oc whoami --show-server`) 29 | TARGET_NAMESPACE=dspa # Do not use the same namespace as where DSPO is deployed (otherwise you will encounter some failed tests that verify DSPA deployment). 30 | 31 | git clone git@github.com:opendatahub-io/data-science-pipelines-operator.git ${DSPO_REPO} 32 | 33 | # Make sure DSPO is already deployed, if not then run: 34 | oc new-project opendatahub 35 | make deploy 36 | 37 | make integrationtest \ 38 | K8SAPISERVERHOST=${TARGET_CLUSTER} \ 39 | DSPANAMESPACE=${TARGET_NAMESPACE} \ 40 | KUBECONFIGPATH=${KUBECONFIG_PATH} 41 | ``` 42 | 43 | #### Use existing DSPA install 44 | 45 | For the impatient developer, you can use the following flag to skip DSPA install. This is useful when you want to make 46 | changes to a live environment and run the tests against it: 47 | 48 | ```bash 49 | go test ./... --tags=test_integration -v \ 50 | -kubeconfig=${KUBECONFIG_PATH} \ 51 | -k8sApiServerHost=${TARGET_CLUSTER} \ 52 | -DSPANamespace=${TARGET_NAMESPACE} \ 53 | -DSPAPath=resources/dspa-lite.yaml \ 54 | -skipDeploy=true \ 55 | -skipCleanup=true 56 | ``` 57 | 58 | The `skipDeploy` and `skipCleanup` flags are independent, and can be added/left out as needed for your use case. 
59 | 60 | [kind-workflow]: ../.github/workflows/kind-integration.yml 61 | -------------------------------------------------------------------------------- /tests/experiments_test.go: -------------------------------------------------------------------------------- 1 | //go:build test_integration 2 | 3 | /* 4 | Copyright 2023. 5 | 6 | Licensed under the Apache License, Version 2.0 (the "License"); 7 | you may not use this file except in compliance with the License. 8 | You may obtain a copy of the License at 9 | 10 | http://www.apache.org/licenses/LICENSE-2.0 11 | 12 | Unless required by applicable law or agreed to in writing, software 13 | distributed under the License is distributed on an "AS IS" BASIS, 14 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | See the License for the specific language governing permissions and 16 | limitations under the License. 17 | */ 18 | 19 | package integration 20 | 21 | import ( 22 | "fmt" 23 | "io/ioutil" 24 | "testing" 25 | 26 | "github.com/stretchr/testify/require" 27 | ) 28 | 29 | func (suite *IntegrationTestSuite) TestFetchExperiments() { 30 | suite.T().Run("Should successfully fetch experiments", func(t *testing.T) { 31 | response, err := suite.Clientmgr.httpClient.Get(fmt.Sprintf("%s/apis/v2beta1/experiments", APIServerURL)) 32 | require.NoError(t, err, "Error fetching experiments") 33 | 34 | responseData, err := ioutil.ReadAll(response.Body) 35 | defer response.Body.Close() 36 | require.NoError(t, err, "Error reading response body") 37 | 38 | suite.Assert().Equal(200, response.StatusCode, "Expected HTTP status code 200 for fetching experiments") 39 | loggr.Info(string(responseData)) 40 | }) 41 | } 42 | -------------------------------------------------------------------------------- /tests/main.go: -------------------------------------------------------------------------------- 1 | //go:build test_integration 2 | 3 | /* 4 | Copyright 2023. 
5 | 6 | Licensed under the Apache License, Version 2.0 (the "License"); 7 | you may not use this file except in compliance with the License. 8 | You may obtain a copy of the License at 9 | 10 | http://www.apache.org/licenses/LICENSE-2.0 11 | 12 | Unless required by applicable law or agreed to in writing, software 13 | distributed under the License is distributed on an "AS IS" BASIS, 14 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | See the License for the specific language governing permissions and 16 | limitations under the License. 17 | */ 18 | 19 | package integration 20 | 21 | // This file only exists as a workaround for: https://github.com/dnephin/pre-commit-golang/issues/78 22 | -------------------------------------------------------------------------------- /tests/resources/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/python:3.9.17 2 | 3 | WORKDIR / 4 | RUN mkdir .cache .local minio /tmp/kfp s3 gcs 5 | RUN chgrp -R 0 /.cache && \ 6 | chmod -R g=u /.cache && \ 7 | chgrp -R 0 /tmp/kfp && \ 8 | chmod -R g=u /tmp/kfp && \ 9 | chgrp -R 0 /.local && \ 10 | chmod -R g=u /.local && \ 11 | chgrp -R 0 /minio && \ 12 | chmod -R g=u /minio && \ 13 | chgrp -R 0 /s3 && \ 14 | chmod -R g=u /s3 && \ 15 | chgrp -R 0 /gcs && \ 16 | chmod -R g=u /gcs 17 | -------------------------------------------------------------------------------- /tests/resources/dspa-external-lite.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: dspa-ext 5 | spec: 6 | dspVersion: v2 7 | podToPodTLS: false 8 | apiServer: 9 | deploy: true 10 | enableOauth: false 11 | enableSamplePipeline: true 12 | cABundle: 13 | configMapName: root-ca 14 | configMapKey: public.crt 15 | resources: 16 | limits: 17 | cpu: 20m 18 | memory: 500Mi 19 | requests: 
20 | cpu: 20m 21 | memory: 100m 22 | initResources: 23 | limits: 24 | cpu: 20m 25 | memory: 200Mi 26 | requests: 27 | cpu: 20m 28 | memory: 100Mi 29 | scheduledWorkflow: 30 | deploy: true 31 | resources: 32 | limits: 33 | cpu: 20m 34 | memory: 500Mi 35 | requests: 36 | cpu: 20m 37 | memory: 100m 38 | persistenceAgent: 39 | deploy: true 40 | resources: 41 | limits: 42 | cpu: 20m 43 | memory: 500Mi 44 | requests: 45 | cpu: 20m 46 | memory: 100Mi 47 | mlmd: 48 | deploy: true 49 | envoy: 50 | image: quay.io/maistra/proxyv2-ubi8:2.5.0 51 | deployRoute: false 52 | resources: 53 | limits: 54 | cpu: 20m 55 | memory: 500Mi 56 | requests: 57 | cpu: 20m 58 | memory: 100Mi 59 | grpc: 60 | resources: 61 | limits: 62 | cpu: 20m 63 | memory: 500Mi 64 | requests: 65 | cpu: 20m 66 | memory: 100Mi 67 | database: 68 | customExtraParams: '{"tls":"true"}' 69 | externalDB: 70 | host: mariadb.test-mariadb.svc.cluster.local 71 | port: "3306" 72 | username: mlpipeline 73 | pipelineDBName: mlpipeline 74 | passwordSecret: 75 | name: ds-pipeline-db-test 76 | key: password 77 | objectStorage: 78 | externalStorage: 79 | bucket: mlpipeline 80 | host: minio.test-minio.svc.cluster.local 81 | port: "9000" 82 | region: us-east-2 83 | s3CredentialsSecret: 84 | accessKey: accesskey 85 | secretKey: secretkey 86 | secretName: minio 87 | scheme: https 88 | -------------------------------------------------------------------------------- /tests/resources/dspa-external.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: dspa-ext 5 | spec: 6 | dspVersion: v2 7 | podToPodTLS: true 8 | apiServer: 9 | deploy: true 10 | enableOauth: true 11 | enableSamplePipeline: true 12 | cABundle: 13 | configMapName: root-ca 14 | configMapKey: public.crt 15 | initResources: 16 | limits: 17 | cpu: 20m 18 | memory: 200Mi 19 | requests: 20 | cpu: 20m 21 | memory: 
100Mi 22 | scheduledWorkflow: 23 | deploy: true 24 | persistenceAgent: 25 | deploy: true 26 | mlmd: 27 | deploy: true 28 | database: 29 | customExtraParams: '{"tls":"true"}' 30 | externalDB: 31 | host: mariadb.test-mariadb.svc.cluster.local 32 | port: "3306" 33 | username: mlpipeline 34 | pipelineDBName: mlpipeline 35 | passwordSecret: 36 | name: ds-pipeline-db-test 37 | key: password 38 | objectStorage: 39 | externalStorage: 40 | bucket: mlpipeline 41 | host: minio.test-minio.svc.cluster.local 42 | port: "9000" 43 | region: us-east-2 44 | s3CredentialsSecret: 45 | accessKey: accesskey 46 | secretKey: secretkey 47 | secretName: minio 48 | scheme: https 49 | -------------------------------------------------------------------------------- /tests/resources/dspa-lite-tls.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: test-dspa 5 | spec: 6 | dspVersion: v2 7 | podToPodTLS: true 8 | apiServer: 9 | deploy: true 10 | enableSamplePipeline: true 11 | cABundle: 12 | configMapName: nginx-tls-config 13 | configMapKey: rootCA.crt 14 | resources: 15 | limits: 16 | cpu: 20m 17 | memory: 500Mi 18 | requests: 19 | cpu: 20m 20 | memory: 100Mi 21 | scheduledWorkflow: 22 | deploy: true 23 | resources: 24 | limits: 25 | cpu: 20m 26 | memory: 500Mi 27 | requests: 28 | cpu: 20m 29 | memory: 100Mi 30 | persistenceAgent: 31 | deploy: true 32 | resources: 33 | limits: 34 | cpu: 20m 35 | memory: 500Mi 36 | requests: 37 | cpu: 20m 38 | memory: 100Mi 39 | mlmd: 40 | deploy: true 41 | envoy: 42 | deployRoute: false 43 | resources: 44 | limits: 45 | cpu: 20m 46 | memory: 500Mi 47 | requests: 48 | cpu: 20m 49 | memory: 100Mi 50 | grpc: 51 | resources: 52 | limits: 53 | cpu: 20m 54 | memory: 500Mi 55 | requests: 56 | cpu: 20m 57 | memory: 100Mi 58 | database: 59 | mariaDB: 60 | deploy: true 61 | pvcSize: 500Mi 62 | resources: 63 
| limits: 64 | cpu: 60m 65 | memory: 500Mi 66 | requests: 67 | cpu: 60m 68 | memory: 500Mi 69 | objectStorage: 70 | minio: 71 | deploy: true 72 | image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 73 | pvcSize: 500Mi 74 | resources: 75 | limits: 76 | cpu: 20m 77 | memory: 500Mi 78 | requests: 79 | cpu: 20m 80 | memory: 100Mi 81 | -------------------------------------------------------------------------------- /tests/resources/dspa-lite.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: test-dspa 5 | spec: 6 | dspVersion: v2 7 | podToPodTLS: false 8 | apiServer: 9 | deploy: true 10 | enableOauth: false 11 | enableSamplePipeline: true 12 | cABundle: 13 | configMapName: nginx-tls-config 14 | configMapKey: rootCA.crt 15 | resources: 16 | limits: 17 | cpu: 20m 18 | memory: 500Mi 19 | requests: 20 | cpu: 20m 21 | memory: 100Mi 22 | scheduledWorkflow: 23 | deploy: true 24 | resources: 25 | limits: 26 | cpu: 20m 27 | memory: 500Mi 28 | requests: 29 | cpu: 20m 30 | memory: 100Mi 31 | persistenceAgent: 32 | deploy: true 33 | resources: 34 | limits: 35 | cpu: 20m 36 | memory: 500Mi 37 | requests: 38 | cpu: 20m 39 | memory: 100Mi 40 | mlmd: 41 | deploy: true 42 | envoy: 43 | image: quay.io/maistra/proxyv2-ubi8:2.5.0 44 | deployRoute: false 45 | resources: 46 | limits: 47 | cpu: 20m 48 | memory: 500Mi 49 | requests: 50 | cpu: 20m 51 | memory: 100Mi 52 | grpc: 53 | resources: 54 | limits: 55 | cpu: 20m 56 | memory: 500Mi 57 | requests: 58 | cpu: 20m 59 | memory: 100Mi 60 | database: 61 | mariaDB: 62 | deploy: true 63 | image: quay.io/sclorg/mariadb-105-c9s:latest 64 | pvcSize: 500Mi 65 | resources: 66 | limits: 67 | cpu: 60m 68 | memory: 500Mi 69 | requests: 70 | cpu: 60m 71 | memory: 500Mi 72 | objectStorage: 73 | minio: 74 | deploy: true 75 | image: 
'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 76 | pvcSize: 500Mi 77 | resources: 78 | limits: 79 | cpu: 20m 80 | memory: 500Mi 81 | requests: 82 | cpu: 20m 83 | memory: 100Mi 84 | -------------------------------------------------------------------------------- /tests/resources/dspa.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 2 | kind: DataSciencePipelinesApplication 3 | metadata: 4 | name: test-dspa 5 | spec: 6 | dspVersion: v2 7 | podToPodTLS: true 8 | apiServer: 9 | deploy: true 10 | enableOauth: true 11 | enableSamplePipeline: true 12 | cABundle: 13 | configMapName: nginx-tls-config 14 | configMapKey: rootCA.crt 15 | objectStorage: 16 | minio: 17 | deploy: true 18 | image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' 19 | pvcSize: 500Mi 20 | resources: 21 | limits: 22 | cpu: 20m 23 | memory: 500Mi 24 | requests: 25 | cpu: 20m 26 | memory: 100Mi 27 | -------------------------------------------------------------------------------- /tests/resources/iris_pipeline_without_cache.py: -------------------------------------------------------------------------------- 1 | from kfp import compiler, dsl 2 | from kfp.dsl import Dataset, Input, Model, Output 3 | 4 | @dsl.component() 5 | def create_dataset(iris_dataset: Output[Dataset]): 6 | data = """\ 7 | Sepal_Length,Sepal_Width,Petal_Length,Petal_Width,Labels 8 | 5.1,3.5,1.4,0.2,Iris-setosa 9 | 4.9,3.0,1.4,0.2,Iris-setosa 10 | 4.7,3.2,1.3,0.2,Iris-setosa 11 | 7.0,3.2,4.7,1.4,Iris-versicolor 12 | 6.4,3.2,4.5,1.5,Iris-versicolor 13 | 6.9,3.1,4.9,1.5,Iris-versicolor 14 | 6.3,3.3,6.0,2.5,Iris-virginica 15 | 5.8,2.7,5.1,1.9,Iris-virginica 16 | 7.1,3.0,5.9,2.1,Iris-virginica 17 | """ 18 | with open(iris_dataset.path, "w") as f: 19 | f.write(data) 20 | 21 | @dsl.component() 22 | def train_model( 23 | iris_dataset: Input[Dataset], # <-- renamed 24 | model: 
Output[Model], 25 | ): 26 | with open(model.path, "w") as f: 27 | f.write("my model") 28 | 29 | @dsl.pipeline(name="sample-training-pipeline") 30 | def my_pipeline(): 31 | create_dataset_task = create_dataset().set_caching_options(False) 32 | train_model(iris_dataset=create_dataset_task.output).set_caching_options(False) # <-- matching name 33 | 34 | if __name__ == "__main__": 35 | compiler.Compiler().compile(my_pipeline, package_path=__file__.replace(".py", "_compiled.yaml")) 36 | -------------------------------------------------------------------------------- /tests/resources/test-pipeline-with-custom-pip-server.py: -------------------------------------------------------------------------------- 1 | from kfp import dsl, compiler 2 | 3 | # Edited the compiled version manually, to remove the --trusted-host flag 4 | # this is so we can test for tls certs validations when launcher installs packages 5 | @dsl.component(base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.0", 6 | packages_to_install=['numpy'], 7 | pip_index_urls=['https://nginx-service.test-pypiserver.svc.cluster.local/simple/'], 8 | pip_trusted_hosts=[]) 9 | def say_hello() -> str: 10 | import numpy as np 11 | hello_text = f'Numpy version: {np.__version__}' 12 | print(hello_text) 13 | return hello_text 14 | 15 | 16 | @dsl.pipeline 17 | def hello_pipeline() -> str: 18 | hello_task = say_hello() 19 | return hello_task.output 20 | 21 | 22 | if __name__ == '__main__': 23 | compiler.Compiler().compile(hello_pipeline, __file__.replace('.py', '-run.yaml')) 24 | -------------------------------------------------------------------------------- /tests/resources/test-pipeline.py: -------------------------------------------------------------------------------- 1 | from kfp import dsl, compiler 2 | 3 | 4 | @dsl.component(base_image="quay.io/opendatahub/ds-pipelines-ci-executor-image:v1.1") 5 | def say_hello(name: str) -> str: 6 | hello_text = f'Hello, {name}!' 
7 | print(hello_text) 8 | return hello_text 9 | 10 | 11 | @dsl.pipeline 12 | def hello_pipeline(recipient: str) -> str: 13 | hello_task = say_hello(name=recipient) 14 | return hello_task.output 15 | 16 | 17 | if __name__ == '__main__': 18 | compiler.Compiler().compile(hello_pipeline, __file__.replace('.py', '-run.yaml')) 19 | -------------------------------------------------------------------------------- /tests/setup/datasciencecluster_openshift.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: datasciencecluster.opendatahub.io/v1 2 | kind: DataScienceCluster 3 | metadata: 4 | name: opendatahub 5 | spec: 6 | components: 7 | dashboard: 8 | managementState: Managed 9 | datasciencepipelines: 10 | managementState: Managed 11 | -------------------------------------------------------------------------------- /tests/upgrades/main.sh: -------------------------------------------------------------------------------- 1 | kubectl create namespace ${DSPA_NS} 2 | cd ${GITHUB_WORKSPACE}/config/samples/v1/dspa-simple 3 | kustomize build . | kubectl -n ${DSPA_NS} apply -f - 4 | --------------------------------------------------------------------------------