├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── helm.yml
│       └── rust.yml
├── .gitignore
├── Cargo.lock
├── Cargo.toml
├── Dockerfile
├── LICENSE
├── README.md
├── SUMMARY.md
├── charts
│   ├── README.md
│   └── databricks-kube-operator
│       ├── .helmignore
│       ├── Chart.yaml
│       ├── README.md
│       ├── templates
│       │   ├── _helpers.tpl
│       │   ├── crds.yaml
│       │   ├── rbac.yaml
│       │   └── sts.yaml
│       └── values.yaml
├── databricks-kube
│   ├── Cargo.toml
│   ├── src
│   │   ├── context.rs
│   │   ├── crdgen.rs
│   │   ├── crds
│   │   │   ├── databricks_job.rs
│   │   │   ├── databricks_secret.rs
│   │   │   ├── databricks_secret_scope.rs
│   │   │   ├── git_credential.rs
│   │   │   ├── mod.rs
│   │   │   └── repo.rs
│   │   ├── error.rs
│   │   ├── lib.rs
│   │   ├── main.rs
│   │   ├── traits
│   │   │   ├── mod.rs
│   │   │   ├── remote_api_resource.rs
│   │   │   ├── remote_api_status.rs
│   │   │   └── rest_config.rs
│   │   └── util.rs
│   └── tests
│       ├── common
│       │   ├── fake_resource.rs
│       │   ├── mock_k8s.rs
│       │   └── mod.rs
│       ├── fixtures
│       │   ├── random-1.json
│       │   └── random-2.json
│       ├── remote_api_resource_test.rs
│       ├── status_api_resource_test.rs
│       └── util_test.rs
├── databricks-rust-git-credentials
│   ├── .gitignore
│   ├── .openapi-generator-ignore
│   ├── .openapi-generator
│   │   ├── FILES
│   │   └── VERSION
│   ├── .travis.yml
│   ├── Cargo.toml
│   ├── README.md
│   ├── docs
│   │   ├── CreateCredentialRequest.md
│   │   ├── DefaultApi.md
│   │   ├── Error.md
│   │   ├── GetCredentialResponse.md
│   │   ├── GetCredentialsResponse.md
│   │   ├── README.md
│   │   └── UpdateCredentialRequest.md
│   ├── git_push.sh
│   └── src
│       ├── apis
│       │   ├── configuration.rs
│       │   ├── default_api.rs
│       │   └── mod.rs
│       ├── lib.rs
│       └── models
│           ├── create_credential_request.rs
│           ├── error.rs
│           ├── get_credential_response.rs
│           ├── get_credentials_response.rs
│           ├── mod.rs
│           └── update_credential_request.rs
├── databricks-rust-jobs
│   ├── .gitignore
│   ├── .openapi-generator-ignore
│   ├── .openapi-generator
│   │   ├── FILES
│   │   └── VERSION
│   ├── .travis.yml
│   ├── Cargo.toml
│   ├── README.md
│   ├── docs
│   │   ├── AccessControlList.md
│   │   ├── AccessControlRequest.md
│   │   ├── AccessControlRequestForGroup.md
│   │   ├── AccessControlRequestForServicePrincipal.md
│   │   ├── AccessControlRequestForUser.md
│   │   ├── AutoScale.md
│   │   ├── AwsAttributes.md
│   │   ├── CanManage.md
│   │   ├── CanManageRun.md
│   │   ├── CanView.md
│   │   ├── ClusterAttributes.md
│   │   ├── ClusterCloudProviderNodeInfo.md
│   │   ├── ClusterCloudProviderNodeStatus.md
│   │   ├── ClusterEvent.md
│   │   ├── ClusterEventType.md
│   │   ├── ClusterInfo.md
│   │   ├── ClusterInstance.md
│   │   ├── ClusterLibraryStatuses.md
│   │   ├── ClusterLogConf.md
│   │   ├── ClusterSize.md
│   │   ├── ClusterSource.md
│   │   ├── ClusterSpec.md
│   │   ├── ClusterState.md
│   │   ├── Continuous.md
│   │   ├── CronSchedule.md
│   │   ├── DbfsStorageInfo.md
│   │   ├── DbtOutput.md
│   │   ├── DbtTask.md
│   │   ├── DefaultApi.md
│   │   ├── DockerBasicAuth.md
│   │   ├── DockerImage.md
│   │   ├── Error.md
│   │   ├── EventDetails.md
│   │   ├── FileStorageInfo.md
│   │   ├── GitSnapshot.md
│   │   ├── GitSource.md
│   │   ├── InitScriptInfo.md
│   │   ├── IsOwner.md
│   │   ├── Job.md
│   │   ├── JobCluster.md
│   │   ├── JobEmailNotifications.md
│   │   ├── JobSettings.md
│   │   ├── JobSettingsQueue.md
│   │   ├── JobTask.md
│   │   ├── JobTaskSettings.md
│   │   ├── JobsCreate200Response.md
│   │   ├── JobsCreateRequest.md
│   │   ├── JobsDeleteRequest.md
│   │   ├── JobsGet200Response.md
│   │   ├── JobsList200Response.md
│   │   ├── JobsResetRequest.md
│   │   ├── JobsRunNow200Response.md
│   │   ├── JobsRunNowRequest.md
│   │   ├── JobsRunsCancelAllRequest.md
│   │   ├── JobsRunsCancelRequest.md
│   │   ├── JobsRunsDeleteRequest.md
│   │   ├── JobsRunsExport200Response.md
│   │   ├── JobsRunsGet200Response.md
│   │   ├── JobsRunsGetOutput200Response.md
│   │   ├── JobsRunsList200Response.md
│   │   ├── JobsRunsRepair200Response.md
│   │   ├── JobsRunsRepairRequest.md
│   │   ├── JobsRunsSubmit200Response.md
│   │   ├── JobsRunsSubmitRequest.md
│   │   ├── JobsUpdateRequest.md
│   │   ├── Library.md
│   │   ├── LibraryFullStatus.md
│   │   ├── LibraryInstallStatus.md
│   │   ├── ListOrder.md
│   │   ├── LogSyncStatus.md
│   │   ├── MavenLibrary.md
│   │   ├── NewCluster.md
│   │   ├── NewTaskCluster.md
│   │   ├── NodeType.md
│   │   ├── NotebookOutput.md
│   │   ├── NotebookTask.md
│   │   ├── PermissionLevel.md
│   │   ├── PermissionLevelForGroup.md
│   │   ├── PipelineTask.md
│   │   ├── PoolClusterTerminationCode.md
│   │   ├── PythonPyPiLibrary.md
│   │   ├── PythonWheelTask.md
│   │   ├── RCranLibrary.md
│   │   ├── RepairHistory.md
│   │   ├── RepairHistoryItem.md
│   │   ├── RepairRunInput.md
│   │   ├── ResizeCause.md
│   │   ├── Run.md
│   │   ├── RunLifeCycleState.md
│   │   ├── RunNowInput.md
│   │   ├── RunParameters.md
│   │   ├── RunParametersPipelineParams.md
│   │   ├── RunResultState.md
│   │   ├── RunState.md
│   │   ├── RunSubmitSettings.md
│   │   ├── RunSubmitTaskSettings.md
│   │   ├── RunTask.md
│   │   ├── RunType.md
│   │   ├── S3StorageInfo.md
│   │   ├── SparkJarTask.md
│   │   ├── SparkNode.md
│   │   ├── SparkNodeAwsAttributes.md
│   │   ├── SparkPythonTask.md
│   │   ├── SparkSubmitTask.md
│   │   ├── SparkVersion.md
│   │   ├── SqlAlertOutput.md
│   │   ├── SqlDashboardOutput.md
│   │   ├── SqlDashboardWidgetOutput.md
│   │   ├── SqlOutput.md
│   │   ├── SqlOutputError.md
│   │   ├── SqlQueryOutput.md
│   │   ├── SqlStatementOutput.md
│   │   ├── SqlTask.md
│   │   ├── SqlTaskAlert.md
│   │   ├── SqlTaskDashboard.md
│   │   ├── SqlTaskQuery.md
│   │   ├── TaskDependenciesInner.md
│   │   ├── TaskSparkSubmitTask.md
│   │   ├── TerminationCode.md
│   │   ├── TerminationParameter.md
│   │   ├── TerminationReason.md
│   │   ├── TerminationType.md
│   │   ├── TriggerType.md
│   │   ├── ViewItem.md
│   │   ├── ViewType.md
│   │   ├── ViewsToExport.md
│   │   ├── WebhookNotifications.md
│   │   └── WebhookNotificationsOnStartInner.md
│   ├── git_push.sh
│   └── src
│       ├── apis
│       │   ├── configuration.rs
│       │   ├── default_api.rs
│       │   └── mod.rs
│       ├── lib.rs
│       └── models
│           ├── access_control_list.rs
│           ├── access_control_request.rs
│           ├── access_control_request_for_group.rs
│           ├── access_control_request_for_service_principal.rs
│           ├── access_control_request_for_user.rs
│           ├── auto_scale.rs
│           ├── aws_attributes.rs
│           ├── can_manage.rs
│           ├── can_manage_run.rs
│           ├── can_view.rs
│           ├── cluster_attributes.rs
│           ├── cluster_cloud_provider_node_info.rs
│           ├── cluster_cloud_provider_node_status.rs
│           ├── cluster_event.rs
│           ├── cluster_event_type.rs
│           ├── cluster_info.rs
│           ├── cluster_instance.rs
│           ├── cluster_library_statuses.rs
│           ├── cluster_log_conf.rs
│           ├── cluster_size.rs
│           ├── cluster_source.rs
│           ├── cluster_spec.rs
│           ├── cluster_state.rs
│           ├── continuous.rs
│           ├── cron_schedule.rs
│           ├── dbfs_storage_info.rs
│           ├── dbt_output.rs
│           ├── dbt_task.rs
│           ├── docker_basic_auth.rs
│           ├── docker_image.rs
│           ├── error.rs
│           ├── event_details.rs
│           ├── file_storage_info.rs
│           ├── git_snapshot.rs
│           ├── git_source.rs
│           ├── init_script_info.rs
│           ├── is_owner.rs
│           ├── job.rs
│           ├── job_cluster.rs
│           ├── job_email_notifications.rs
│           ├── job_settings.rs
│           ├── job_settings_queue.rs
│           ├── job_task.rs
│           ├── job_task_settings.rs
│           ├── jobs_create_200_response.rs
│           ├── jobs_create_request.rs
│           ├── jobs_delete_request.rs
│           ├── jobs_get_200_response.rs
│           ├── jobs_list_200_response.rs
│           ├── jobs_reset_request.rs
│           ├── jobs_run_now_200_response.rs
│           ├── jobs_run_now_request.rs
│           ├── jobs_runs_cancel_all_request.rs
│           ├── jobs_runs_cancel_request.rs
│           ├── jobs_runs_delete_request.rs
│           ├── jobs_runs_export_200_response.rs
│           ├── jobs_runs_get_200_response.rs
│           ├── jobs_runs_get_output_200_response.rs
│           ├── jobs_runs_list_200_response.rs
│           ├── jobs_runs_repair_200_response.rs
│           ├── jobs_runs_repair_request.rs
│           ├── jobs_runs_submit_200_response.rs
│           ├── jobs_runs_submit_request.rs
│           ├── jobs_update_request.rs
│           ├── library.rs
│           ├── library_full_status.rs
│           ├── library_install_status.rs
│           ├── list_order.rs
│           ├── log_sync_status.rs
│           ├── maven_library.rs
│           ├── mod.rs
│           ├── new_cluster.rs
│           ├── new_task_cluster.rs
│           ├── node_type.rs
│           ├── notebook_output.rs
│           ├── notebook_task.rs
│           ├── permission_level.rs
│           ├── permission_level_for_group.rs
│           ├── pipeline_task.rs
│           ├── pool_cluster_termination_code.rs
│           ├── python_py_pi_library.rs
│           ├── python_wheel_task.rs
│           ├── r_cran_library.rs
│           ├── repair_history.rs
│           ├── repair_history_item.rs
│           ├── repair_run_input.rs
│           ├── resize_cause.rs
│           ├── run.rs
│           ├── run_life_cycle_state.rs
│           ├── run_now_input.rs
│           ├── run_parameters.rs
│           ├── run_parameters_pipeline_params.rs
│           ├── run_result_state.rs
│           ├── run_state.rs
│           ├── run_submit_settings.rs
│           ├── run_submit_task_settings.rs
│           ├── run_task.rs
│           ├── run_type.rs
│           ├── s3_storage_info.rs
│           ├── spark_jar_task.rs
│           ├── spark_node.rs
│           ├── spark_node_aws_attributes.rs
│           ├── spark_python_task.rs
│           ├── spark_submit_task.rs
│           ├── spark_version.rs
│           ├── sql_alert_output.rs
│           ├── sql_dashboard_output.rs
│           ├── sql_dashboard_widget_output.rs
│           ├── sql_output.rs
│           ├── sql_output_error.rs
│           ├── sql_query_output.rs
│           ├── sql_statement_output.rs
│           ├── sql_task.rs
│           ├── sql_task_alert.rs
│           ├── sql_task_dashboard.rs
│           ├── sql_task_query.rs
│           ├── task_dependencies_inner.rs
│           ├── task_spark_submit_task.rs
│           ├── termination_code.rs
│           ├── termination_parameter.rs
│           ├── termination_reason.rs
│           ├── termination_type.rs
│           ├── trigger_type.rs
│           ├── view_item.rs
│           ├── view_type.rs
│           ├── views_to_export.rs
│           ├── webhook_notifications.rs
│           └── webhook_notifications_on_start_inner.rs
├── databricks-rust-repos
│   ├── .gitignore
│   ├── .openapi-generator-ignore
│   ├── .openapi-generator
│   │   ├── FILES
│   │   └── VERSION
│   ├── .travis.yml
│   ├── Cargo.toml
│   ├── README.md
│   ├── docs
│   │   ├── Branch.md
│   │   ├── CreateRepoRequest.md
│   │   ├── DefaultApi.md
│   │   ├── Error.md
│   │   ├── GetRepoResponse.md
│   │   ├── GetReposResponse.md
│   │   ├── README.md
│   │   ├── Tag.md
│   │   └── UpdateRepoRequest.md
│   ├── git_push.sh
│   └── src
│       ├── apis
│       │   ├── configuration.rs
│       │   ├── default_api.rs
│       │   └── mod.rs
│       ├── lib.rs
│       └── models
│           ├── branch.rs
│           ├── create_repo_request.rs
│           ├── error.rs
│           ├── get_repo_response.rs
│           ├── get_repos_response.rs
│           ├── mod.rs
│           ├── tag.rs
│           └── update_repo_request.rs
├── databricks-rust-secrets
│   ├── .gitignore
│   ├── .openapi-generator-ignore
│   ├── .openapi-generator
│   │   ├── FILES
│   │   └── VERSION
│   ├── .travis.yml
│   ├── Cargo.toml
│   ├── README.md
│   ├── docs
│   │   ├── SecretApi.md
│   │   ├── WorkspaceAclItem.md
│   │   ├── WorkspaceAclPermission.md
│   │   ├── WorkspaceAzureKeyVaultSecretScopeMetadata.md
│   │   ├── WorkspaceCreateScope.md
│   │   ├── WorkspaceDeleteAcl.md
│   │   ├── WorkspaceDeleteScope.md
│   │   ├── WorkspaceDeleteSecret.md
│   │   ├── WorkspaceGetSecretResponse.md
│   │   ├── WorkspaceListAclsResponse.md
│   │   ├── WorkspaceListScopesResponse.md
│   │   ├── WorkspaceListSecretsResponse.md
│   │   ├── WorkspacePutAcl.md
│   │   ├── WorkspacePutSecret.md
│   │   ├── WorkspaceScopeBackendType.md
│   │   ├── WorkspaceSecretMetadata.md
│   │   └── WorkspaceSecretScope.md
│   ├── git_push.sh
│   └── src
│       ├── apis
│       │   ├── configuration.rs
│       │   ├── mod.rs
│       │   └── secret_api.rs
│       ├── lib.rs
│       └── models
│           ├── mod.rs
│           ├── workspace_acl_item.rs
│           ├── workspace_acl_permission.rs
│           ├── workspace_azure_key_vault_secret_scope_metadata.rs
│           ├── workspace_create_scope.rs
│           ├── workspace_delete_acl.rs
│           ├── workspace_delete_scope.rs
│           ├── workspace_delete_secret.rs
│           ├── workspace_get_secret_response.rs
│           ├── workspace_list_acls_response.rs
│           ├── workspace_list_scopes_response.rs
│           ├── workspace_list_secrets_response.rs
│           ├── workspace_put_acl.rs
│           ├── workspace_put_secret.rs
│           ├── workspace_scope_backend_type.rs
│           ├── workspace_secret_metadata.rs
│           └── workspace_secret_scope.rs
├── examples
│   ├── databricks-job.yaml
│   ├── databricks-secret.yaml
│   ├── git-credential.yaml
│   ├── git-repo.yaml
│   └── job.py
├── openapi
│   ├── config-git.yaml
│   ├── config-jobs.yaml
│   ├── config-repos.yaml
│   ├── config-secrets.yaml
│   ├── gitcredentials-2.0-aws.yaml
│   ├── jobs-2.1-aws.yaml
│   ├── oem
│   │   ├── gitcredentials-2.0-aws.yaml
│   │   ├── jobs-2.1-aws.yaml
│   │   └── repos-2.0-aws.yaml
│   ├── openapi-helper.bb
│   └── repos-2.0-aws.yaml
└── tutorial.md

/.github/dependabot.yml:
--------------------------------------------------------------------------------

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"

--------------------------------------------------------------------------------
/.github/workflows/helm.yml:
--------------------------------------------------------------------------------
on: [push]

name: Helm Chart

jobs:
  release:
    if: github.ref == 'refs/heads/master'
    # depending on default permission settings for your org (contents being read-only or read-write for workloads), you will have to add permissions
    # see: https://docs.github.com/en/actions/security-guides/automatic-token-authentication#modifying-the-permissions-for-the-github_token
    permissions:
      contents: write
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Configure Git
        run: |
          git config user.name "$GITHUB_ACTOR"
          git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
      - name: Install Helm
        uses: azure/setup-helm@v3
        with:
          version: v3.10.0
      - name: Run chart-releaser
        uses: helm/chart-releaser-action@v1.6.0
        env:
          CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}"

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/target
.idea/**

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[workspace]
members = [
  "databricks-kube",
  "databricks-rust-jobs",
  "databricks-rust-git-credentials",
  "databricks-rust-repos",
  "databricks-rust-secrets"
]

--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM ubuntu:latest

RUN apt update
RUN apt install -y tini ca-certificates libssl-dev

ENTRYPOINT ["/usr/bin/tini", "--"]

WORKDIR /home/operator

ADD target/release/crd_gen .
ADD target/release/databricks_kube .

RUN chmod +x crd_gen
RUN chmod +x databricks_kube

ENV RUST_LOG databricks_kube
CMD ["/home/operator/databricks_kube"]

--------------------------------------------------------------------------------
/charts/README.md:
--------------------------------------------------------------------------------
# charts
--------------------------------------------------------------------------------
/charts/databricks-kube-operator/.helmignore:
--------------------------------------------------------------------------------
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

--------------------------------------------------------------------------------
/charts/databricks-kube-operator/Chart.yaml:
--------------------------------------------------------------------------------
apiVersion: v2
appVersion: 0.8.4
name: databricks-kube-operator
description: A kube-rs operator for managing Databricks API resources
version: 0.9.5

home: https://github.com/mach-kernel/databricks-kube-operator
sources:
  - https://github.com/mach-kernel/databricks-kube-operator

keywords:
  - databricks
  - gitops
  - kube-rs
maintainers:
  - name: mach-kernel
    url: https://mach-kernel.github.io/databricks-kube-operator/

--------------------------------------------------------------------------------
/charts/databricks-kube-operator/README.md:
--------------------------------------------------------------------------------
## databricks-kube-operator Helm Chart

```
../target/release/crd_gen > templates/crds.yaml
```

--------------------------------------------------------------------------------
/charts/databricks-kube-operator/templates/sts.yaml:
--------------------------------------------------------------------------------
---
apiVersion: apps/v1
kind: StatefulSet
metadata:
  name: databricks-kube-operator
  namespace: {{ .Release.Namespace }}
  labels:
    app: {{ template "databricks-kube-operator.name" . }}
    release: {{ .Release.Name }}
spec:
  selector:
    matchLabels:
      app: databricks-kube-operator
  replicas: 1
  serviceName: databricks-kube-operator
  template:
    metadata:
      annotations:
        {{- toYaml .Values.podAnnotations | nindent 8 }}
      labels:
        app: {{ template "databricks-kube-operator.name" . }}
    spec:
      serviceAccountName: {{ .Values.serviceAccount.name }}
      terminationGracePeriodSeconds: 10
      containers:
        - name: dko
          image: {{ .Values.image.repository }}:{{ default .Chart.AppVersion .Values.image.tag }}
          imagePullPolicy: Always
          env:
            - name: DATABRICKS_KUBE_CONFIGMAP
              value: {{ .Values.configMapName }}
            - name: RUST_LOG
              value: databricks_kube
          resources:
            {{- toYaml .Values.resources | nindent 10 }}
      affinity:
        {{- toYaml .Values.affinity | nindent 8 }}
      nodeSelector:
        {{- toYaml .Values.nodeSelector | nindent 8 }}

--------------------------------------------------------------------------------
/charts/databricks-kube-operator/values.yaml:
--------------------------------------------------------------------------------
configMapName: databricks-kube-operator
installCRDs: true

image:
  repository: ghcr.io/mach-kernel/databricks-kube-operator
  # -- Overrides the image tag whose default is the chart appVersion
  tag: ""

podAnnotations: {}
nodeSelector:
  kubernetes.io/os: linux
  kubernetes.io/arch: amd64
resources: {}
affinity: {}

serviceAccount:
  create: true
  name: "databricks-kube-operator"
--------------------------------------------------------------------------------
/databricks-kube/Cargo.toml:
--------------------------------------------------------------------------------
[[bin]]
name = "crd_gen"
path = "src/crdgen.rs"

[package]
name = "databricks_kube"
default-run = "databricks_kube"
version = "0.8.3"
edition = "2021"

[dependencies]
databricks_rust_jobs = { path = "../databricks-rust-jobs" }
databricks_rust_git_credentials = { path = "../databricks-rust-git-credentials" }
databricks_rust_repos = { path = "../databricks-rust-repos" }
databricks_rust_secrets = { path = "../databricks-rust-secrets" }

async-stream = "0.3.3"
assert-json-diff = "2.0.2"
flurry = "0.4.0"
env_logger = { version = "0.9.1" }
futures = "0.3"
git-version = "0.3.5"
jsonschema = "0.16.1"
kube = { version = "0.92.0", features = ["runtime", "derive", "client"] }
k8s-openapi = { version = "0.22.0", features = ["v1_25"] }
lazy_static = "1.4.0"
log = "0.4.17"
serde = { version = "1.0.147", features = ["derive"] }
serde_json = "1.0.87"
serde_yaml = "0.9.14"
schemars = { version = "0.8.11", features = ["derive"] }
tokio = { version = "1.24.2", features = ["macros", "rt-multi-thread"] }
tokio-graceful-shutdown = "0.11.1"
tokio-stream = "0.1.11"
thiserror = "1.0.56"

[dependencies.reqwest]
version = "^0.12"
features = ["json", "multipart"]

[dev-dependencies]
tower-test = "0.4.0"
hyper = "0.14.23"
http = "1.1.0"
http-body = "1.0.0"
http-body-util = "0.1.2"

--------------------------------------------------------------------------------
/databricks-kube/src/crdgen.rs:
--------------------------------------------------------------------------------
mod context;
mod crds;
mod error;
mod traits;
mod util;

use kube::CustomResourceExt;
use serde_yaml::to_string;

fn main() {
    print!(
        "---\n{}\n",
        to_string(&crate::crds::databricks_job::DatabricksJob::crd()).unwrap()
    );
    print!(
        "---\n{}\n",
        to_string(&crate::crds::git_credential::GitCredential::crd()).unwrap()
    );
    print!(
        "---\n{}\n",
        to_string(&crate::crds::repo::Repo::crd()).unwrap()
    );
    print!(
        "---\n{}\n",
        to_string(&crate::crds::databricks_secret_scope::DatabricksSecretScope::crd()).unwrap()
    );
    print!(
        "---\n{}\n",
        to_string(&crate::crds::databricks_secret::DatabricksSecret::crd()).unwrap()
    );
}
--------------------------------------------------------------------------------
/databricks-kube/src/crds/mod.rs:
--------------------------------------------------------------------------------
pub mod databricks_job;
pub mod databricks_secret;
pub mod databricks_secret_scope;
pub mod git_credential;
pub mod repo;

--------------------------------------------------------------------------------
/databricks-kube/src/lib.rs:
--------------------------------------------------------------------------------
pub mod context;
pub mod crds;
pub mod error;
pub mod traits;
pub mod util;

--------------------------------------------------------------------------------
/databricks-kube/src/traits/mod.rs:
--------------------------------------------------------------------------------
pub mod remote_api_resource;
pub mod remote_api_status;
pub mod rest_config;

--------------------------------------------------------------------------------
/databricks-kube/tests/common/mod.rs:
--------------------------------------------------------------------------------
pub mod fake_resource;
pub mod mock_k8s;

--------------------------------------------------------------------------------
/databricks-kube/tests/util_test.rs:
--------------------------------------------------------------------------------
use serde_json::Value;
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

use databricks_kube::util::hash_json_value;

#[test]
fn test_hash() {
    let mut hasher_a = DefaultHasher::new();
    let mut hasher_b = DefaultHasher::new();

    let fuzzy_json: Vec<Value> = vec![
        serde_json::from_str(include_str!("fixtures/random-1.json")).unwrap(),
        serde_json::from_str(include_str!("fixtures/random-2.json")).unwrap(),
    ];

    for j in fuzzy_json {
        hash_json_value(&mut hasher_a, &j);
        hash_json_value(&mut hasher_b, &j);

        assert_eq!(hasher_a.finish(), hasher_b.finish());

        hasher_a = DefaultHasher::new();
        hasher_b = DefaultHasher::new();
    }
}
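databricks-kube/src/util.rs is not included in this listing, so the function the fixture test exercises is shown here only as a sketch: a deterministic hasher over `serde_json::Value` that feeds the tree into any `Hasher`, visiting object keys in sorted order so equal documents hash equally. The name matches the import above, but the body is an assumption, not the crate's actual implementation.

```rust
use serde_json::Value;
use std::hash::Hasher;

// Hypothetical sketch of `hash_json_value`: recursively write the JSON tree
// into the hasher. Object entries are visited in sorted key order so the
// result does not depend on map insertion order.
pub fn hash_json_value<H: Hasher>(hasher: &mut H, value: &Value) {
    match value {
        Value::Null => hasher.write_u8(0),
        Value::Bool(b) => hasher.write_u8(if *b { 1 } else { 2 }),
        Value::Number(n) => hasher.write(n.to_string().as_bytes()),
        Value::String(s) => hasher.write(s.as_bytes()),
        Value::Array(items) => {
            for item in items {
                hash_json_value(hasher, item);
            }
        }
        Value::Object(map) => {
            let mut entries: Vec<(&String, &Value)> = map.iter().collect();
            entries.sort_by(|a, b| a.0.cmp(b.0));
            for (k, v) in entries {
                hasher.write(k.as_bytes());
                hash_json_value(hasher, v);
            }
        }
    }
}
```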
--------------------------------------------------------------------------------
/databricks-rust-git-credentials/.gitignore:
--------------------------------------------------------------------------------
/target/
**/*.rs.bk
Cargo.lock

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/.openapi-generator-ignore:
--------------------------------------------------------------------------------
# OpenAPI Generator Ignore
# Generated by openapi-generator https://github.com/openapitools/openapi-generator

# Use this file to prevent files from being overwritten by the generator.
# The patterns follow closely to .gitignore or .dockerignore.

# As an example, the C# client generator defines ApiClient.cs.
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
#ApiClient.cs

# You can match any string of characters against a directory, file or extension with a single asterisk (*):
#foo/*/qux
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux

# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
#foo/**/qux
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux

# You can also negate patterns with an exclamation (!).
# For example, you can ignore all files in a docs folder with the file extension .md:
#docs/*.md
# Then explicitly reverse the ignore rule for a single file:
#!docs/README.md

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/.openapi-generator/FILES:
--------------------------------------------------------------------------------
.gitignore
.openapi-generator-ignore
.travis.yml
Cargo.toml
README.md
docs/CreateCredentialRequest.md
docs/DefaultApi.md
docs/Error.md
docs/GetCredentialResponse.md
docs/GetCredentialsResponse.md
docs/UpdateCredentialRequest.md
git_push.sh
src/apis/configuration.rs
src/apis/default_api.rs
src/apis/mod.rs
src/lib.rs
src/models/create_credential_request.rs
src/models/error.rs
src/models/get_credential_response.rs
src/models/get_credentials_response.rs
src/models/mod.rs
src/models/update_credential_request.rs

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/.openapi-generator/VERSION:
--------------------------------------------------------------------------------
6.2.0

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/.travis.yml:
--------------------------------------------------------------------------------
language: rust

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "databricks_rust_git_credentials"
version = "2.1.2"
authors = ["OpenAPI Generator team and contributors"]
edition = "2018"

[dependencies]
schemars = "0.8.11"
serde = { version = "^1.0", features = ["derive"] }
serde_derive = "^1.0"
serde_with = "^2.0"
serde_json = "^1.0"
url = "^2.5"
uuid = { version = "^1.8", features = ["serde", "v4"] }
reqwest = { version = "^0.12", features = ["json", "multipart"] }

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/docs/CreateCredentialRequest.md:
--------------------------------------------------------------------------------
# CreateCredentialRequest

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**personal_access_token** | **String** | The personal access token used to authenticate to the corresponding Git provider. |
**git_username** | **String** | Git username. |
**git_provider** | **String** | Git provider. This field is case-insensitive. The available Git providers are awsCodeCommit, azureDevOpsServices, bitbucketCloud, bitbucketServer, gitHub, gitHubEnterprise, gitLab, and gitLabEnterpriseEdition. |

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
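As a usage sketch (assuming the generated `CreateCredentialRequest` struct has exactly the three required fields listed in the table above):

```rust
use databricks_rust_git_credentials::models::CreateCredentialRequest;

// Sketch only: placeholder values, not real credentials.
fn github_credential() -> CreateCredentialRequest {
    CreateCredentialRequest {
        personal_access_token: "<personal-access-token>".to_string(),
        git_username: "octocat".to_string(),
        // Case-insensitive; one of the providers enumerated above.
        git_provider: "gitHub".to_string(),
    }
}
```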
--------------------------------------------------------------------------------
/databricks-rust-git-credentials/docs/Error.md:
--------------------------------------------------------------------------------
# Error

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**error_code** | Option<**String**> | Error code | [optional]
**message** | Option<**String**> | Human-readable error message describing the cause of the error. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/docs/GetCredentialResponse.md:
--------------------------------------------------------------------------------
# GetCredentialResponse

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**credential_id** | Option<**i64**> | ID of the credential object in the workspace. | [optional]
**git_username** | Option<**String**> | Git username. | [optional]
**git_provider** | Option<**String**> | Git provider. This field is case-insensitive. The available Git providers are awsCodeCommit, azureDevOpsServices, bitbucketCloud, bitbucketServer, gitHub, gitHubEnterprise, gitLab, and gitLabEnterpriseEdition. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/docs/GetCredentialsResponse.md:
--------------------------------------------------------------------------------
# GetCredentialsResponse

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**credentials** | Option<[**Vec**](GetCredentialResponse.md)> | | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/docs/README.md:
--------------------------------------------------------------------------------
# docs

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/docs/UpdateCredentialRequest.md:
--------------------------------------------------------------------------------
# UpdateCredentialRequest

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**personal_access_token** | **String** | The personal access token used to authenticate to the corresponding Git provider. |
**git_username** | Option<**String**> | Git username. | [optional]
**git_provider** | Option<**String**> | Git provider. This field is case-insensitive. The available Git providers are awsCodeCommit, azureDevOpsServices, bitbucketCloud, bitbucketServer, gitHub, gitHubEnterprise, gitLab, and gitLabEnterpriseEdition. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
--------------------------------------------------------------------------------
/databricks-rust-git-credentials/src/lib.rs:
--------------------------------------------------------------------------------
#[macro_use]
extern crate serde_derive;

extern crate reqwest;
extern crate serde;
extern crate serde_json;
extern crate url;

pub mod apis;
pub mod models;

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/src/models/error.rs:
--------------------------------------------------------------------------------
use schemars::JsonSchema;
/*
 * Git Credentials API
 *
 * The Git credentials API allows users to manage their [Git credentials](https://docs.databricks.com/repos.html#configure-your-git-integration-with-databricks) to use [Databricks Repos](https://docs.databricks.com/repos.html).
 *
 * The version of the OpenAPI document: 2.0.0
 *
 * Generated by: https://openapi-generator.tech
 */

#[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct Error {
    /// Error code
    #[serde(rename = "error_code", skip_serializing_if = "Option::is_none")]
    pub error_code: Option<String>,
    /// Human-readable error message describing the cause of the error.
    #[serde(rename = "message", skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

impl Error {
    pub fn new() -> Error {
        Error {
            error_code: None,
            message: None,
        }
    }
}
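Since the full struct is shown above, a small round-trip demonstrates what the `skip_serializing_if` attributes buy: unset optional fields are omitted from the payload entirely rather than serialized as `null`. The error code string below is just an example value.

```rust
use databricks_rust_git_credentials::models::Error;

fn main() {
    // Both fields are None after new(), so nothing is serialized.
    let mut err = Error::new();
    assert_eq!(serde_json::to_string(&err).unwrap(), "{}");

    // Example value only; any error_code string works the same way.
    err.error_code = Some("RESOURCE_DOES_NOT_EXIST".to_string());
    assert_eq!(
        serde_json::to_string(&err).unwrap(),
        r#"{"error_code":"RESOURCE_DOES_NOT_EXIST"}"#
    );
}
```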
--------------------------------------------------------------------------------
/databricks-rust-git-credentials/src/models/get_credentials_response.rs:
--------------------------------------------------------------------------------
use schemars::JsonSchema;
/*
 * Git Credentials API
 *
 * The Git credentials API allows users to manage their [Git credentials](https://docs.databricks.com/repos.html#configure-your-git-integration-with-databricks) to use [Databricks Repos](https://docs.databricks.com/repos.html).
 *
 * The version of the OpenAPI document: 2.0.0
 *
 * Generated by: https://openapi-generator.tech
 */

#[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct GetCredentialsResponse {
    #[serde(rename = "credentials", skip_serializing_if = "Option::is_none")]
    pub credentials: Option<Vec<crate::models::GetCredentialResponse>>,
}

impl GetCredentialsResponse {
    pub fn new() -> GetCredentialsResponse {
        GetCredentialsResponse { credentials: None }
    }
}

--------------------------------------------------------------------------------
/databricks-rust-git-credentials/src/models/mod.rs:
--------------------------------------------------------------------------------
pub mod create_credential_request;
pub use self::create_credential_request::CreateCredentialRequest;
pub mod error;
pub use self::error::Error;
pub mod get_credential_response;
pub use self::get_credential_response::GetCredentialResponse;
pub mod get_credentials_response;
pub use self::get_credentials_response::GetCredentialsResponse;
pub mod update_credential_request;
pub use self::update_credential_request::UpdateCredentialRequest;

--------------------------------------------------------------------------------
/databricks-rust-jobs/.gitignore:
--------------------------------------------------------------------------------
/target/
**/*.rs.bk
Cargo.lock

--------------------------------------------------------------------------------
/databricks-rust-jobs/.openapi-generator-ignore:
--------------------------------------------------------------------------------
# OpenAPI Generator Ignore
# Generated by openapi-generator https://github.com/openapitools/openapi-generator

# Use this file to prevent files from being overwritten by the generator.
# The patterns follow closely to .gitignore or .dockerignore.

# As an example, the C# client generator defines ApiClient.cs.
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
#ApiClient.cs

# You can match any string of characters against a directory, file or extension with a single asterisk (*):
#foo/*/qux
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux

# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
#foo/**/qux
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux

# You can also negate patterns with an exclamation (!).
# For example, you can ignore all files in a docs folder with the file extension .md:
#docs/*.md
# Then explicitly reverse the ignore rule for a single file:
#!docs/README.md
--------------------------------------------------------------------------------
/databricks-rust-jobs/.openapi-generator/VERSION:
--------------------------------------------------------------------------------
7.2.0

--------------------------------------------------------------------------------
/databricks-rust-jobs/.travis.yml:
--------------------------------------------------------------------------------
language: rust

--------------------------------------------------------------------------------
/databricks-rust-jobs/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "databricks_rust_jobs"
version = "2.1.4"
authors = ["OpenAPI Generator team and contributors"]
description = "The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs."
# Override this license by providing a License Object in the OpenAPI.
license = "Unlicense"
edition = "2018"

[dependencies]
schemars = "0.8.11"
serde = { version = "^1.0", features = ["derive"] }
serde_derive = "^1.0"
serde_with = "^2.0"
serde_json = "^1.0"
url = "^2.5"
uuid = { version = "^1.8", features = ["serde", "v4"] }
reqwest = { version = "^0.12", features = ["json", "multipart"] }

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/AccessControlList.md:
--------------------------------------------------------------------------------
# AccessControlList

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**access_control_list** | Option<[**Vec**](AccessControlRequest.md)> | List of permissions to set on the job. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/AccessControlRequest.md:
--------------------------------------------------------------------------------
# AccessControlRequest

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**user_name** | Option<**String**> | Email address for the user. | [optional]
**permission_level** | Option<[**crate::models::PermissionLevel**](PermissionLevel.md)> | | [optional]
**group_name** | Option<**String**> | Group name. There are two built-in groups: `users` for all users, and `admins` for administrators. | [optional]
**service_principal_name** | Option<**String**> | Name of an Azure service principal. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
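A sketch of granting one user management rights on a job, assuming the generated struct mirrors the table above and derives `Default` (as the git-credentials models do). The `PermissionLevel::CanManage` variant name is a guess at how the generator renders the CAN_MANAGE level; check the generated permission_level.rs for the real name.

```rust
use databricks_rust_jobs::models::{AccessControlRequest, PermissionLevel};

// Exactly one principal field (user_name, group_name, or
// service_principal_name) should be set per request.
fn grant_manage(user_email: &str) -> AccessControlRequest {
    AccessControlRequest {
        user_name: Some(user_email.to_string()),
        // Variant name assumed from the CAN_MANAGE permission level.
        permission_level: Some(PermissionLevel::CanManage),
        ..Default::default()
    }
}
```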
--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/AccessControlRequestForGroup.md:
--------------------------------------------------------------------------------
# AccessControlRequestForGroup

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**group_name** | Option<**String**> | Group name. There are two built-in groups: `users` for all users, and `admins` for administrators. | [optional]
**permission_level** | Option<[**crate::models::PermissionLevelForGroup**](PermissionLevelForGroup.md)> | | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/AccessControlRequestForServicePrincipal.md:
--------------------------------------------------------------------------------
# AccessControlRequestForServicePrincipal

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**service_principal_name** | Option<**String**> | Name of an Azure service principal. | [optional]
**permission_level** | Option<[**crate::models::PermissionLevel**](PermissionLevel.md)> | | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/AccessControlRequestForUser.md:
--------------------------------------------------------------------------------
# AccessControlRequestForUser

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**user_name** | Option<**String**> | Email address for the user. | [optional]
**permission_level** | Option<[**crate::models::PermissionLevel**](PermissionLevel.md)> | | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/AutoScale.md:
--------------------------------------------------------------------------------
# AutoScale

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**min_workers** | Option<**i32**> | The minimum number of workers to which the cluster can scale down when underutilized. It is also the initial number of workers the cluster has after creation. | [optional]
**max_workers** | Option<**i32**> | The maximum number of workers to which the cluster can scale up when overloaded. max_workers must be strictly greater than min_workers. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
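A sketch of the invariant called out above, assuming the two fields in the table are the only ones on the generated struct:

```rust
use databricks_rust_jobs::models::AutoScale;

// min_workers doubles as the initial cluster size; max_workers must be
// strictly greater, per the field descriptions above.
fn scale_two_to_eight() -> AutoScale {
    let autoscale = AutoScale {
        min_workers: Some(2),
        max_workers: Some(8),
    };
    assert!(autoscale.max_workers > autoscale.min_workers);
    autoscale
}
```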
--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/CanManage.md:
--------------------------------------------------------------------------------
# CanManage

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/CanManageRun.md:
--------------------------------------------------------------------------------
# CanManageRun

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/CanView.md:
--------------------------------------------------------------------------------
# CanView

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterCloudProviderNodeInfo.md:
--------------------------------------------------------------------------------
# ClusterCloudProviderNodeInfo

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**status** | Option<[**crate::models::ClusterCloudProviderNodeStatus**](ClusterCloudProviderNodeStatus.md)> | | [optional]
**available_core_quota** | Option<**i32**> | Available CPU core quota. | [optional]
**total_core_quota** | Option<**i32**> | Total CPU core quota. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterCloudProviderNodeStatus.md:
--------------------------------------------------------------------------------
# ClusterCloudProviderNodeStatus

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterEvent.md:
--------------------------------------------------------------------------------
# ClusterEvent

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**cluster_id** | **String** | Canonical identifier for the cluster. This field is required. |
**timestamp** | Option<**i64**> | The timestamp when the event occurred, stored as the number of milliseconds since the unix epoch. Assigned by the Timeline service. | [optional]
**r#type** | [**crate::models::ClusterEventType**](ClusterEventType.md) | |
**details** | [**crate::models::EventDetails**](EventDetails.md) | |

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterEventType.md:
--------------------------------------------------------------------------------
# ClusterEventType

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterInstance.md:
--------------------------------------------------------------------------------
# ClusterInstance

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**cluster_id** | Option<**String**> | The canonical identifier for the cluster used by a run. This field is always available for runs on existing clusters. For runs on new clusters, it becomes available once the cluster is created. This value can be used to view logs by browsing to `/#setting/sparkui/$cluster_id/driver-logs`. The logs continue to be available after the run completes. The response won’t include this field if the identifier is not available yet. | [optional]
**spark_context_id** | Option<**String**> | The canonical identifier for the Spark context used by a run. This field is filled in once the run begins execution. This value can be used to view the Spark UI by browsing to `/#setting/sparkui/$cluster_id/$spark_context_id`. The Spark UI continues to be available after the run has completed. The response won’t include this field if the identifier is not available yet. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterLibraryStatuses.md:
--------------------------------------------------------------------------------
# ClusterLibraryStatuses

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**cluster_id** | Option<**String**> | Unique identifier for the cluster. | [optional]
**library_statuses** | Option<[**Vec**](LibraryFullStatus.md)> | Status of all libraries on the cluster. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterLogConf.md:
--------------------------------------------------------------------------------
# ClusterLogConf

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**dbfs** | Option<[**crate::models::DbfsStorageInfo**](DbfsStorageInfo.md)> | | [optional]
**s3** | Option<[**crate::models::S3StorageInfo**](S3StorageInfo.md)> | | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterSize.md:
--------------------------------------------------------------------------------
# ClusterSize

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**num_workers** | Option<**i32**> | If num_workers, number of worker nodes that this cluster must have. A cluster has one Spark driver and num_workers executors for a total of num_workers + 1 Spark nodes. When reading the properties of a cluster, this field reflects the desired number of workers rather than the actual number of workers. For instance, if a cluster is resized from 5 to 10 workers, this field is updated to reflect the target size of 10 workers, whereas the workers listed in executors gradually increase from 5 to 10 as the new nodes are provisioned. | [optional]
**autoscale** | Option<[**crate::models::AutoScale**](AutoScale.md)> | | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
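A cluster size is either fixed or autoscaling; here is a sketch of both shapes, taking the field types directly from the table above (the generated code may wrap nested models differently, e.g. in `Box`):

```rust
use databricks_rust_jobs::models::{AutoScale, ClusterSize};

// Fixed-size cluster: one Spark driver plus four workers.
fn fixed() -> ClusterSize {
    ClusterSize {
        num_workers: Some(4),
        autoscale: None,
    }
}

// Autoscaling cluster: num_workers is left unset in favor of a range.
fn autoscaling() -> ClusterSize {
    ClusterSize {
        num_workers: None,
        autoscale: Some(AutoScale {
            min_workers: Some(2),
            max_workers: Some(8),
        }),
    }
}
```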
--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterSource.md:
--------------------------------------------------------------------------------
# ClusterSource

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterSpec.md:
--------------------------------------------------------------------------------
# ClusterSpec

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**existing_cluster_id** | Option<**String**> | If existing_cluster_id, the ID of an existing cluster that is used for all runs of this job. When running jobs on an existing cluster, you may need to manually restart the cluster if it stops responding. We suggest running jobs on new clusters for greater reliability. | [optional]
**new_cluster** | Option<[**crate::models::NewCluster**](NewCluster.md)> | | [optional]
**libraries** | Option<[**Vec**](Library.md)> | An optional list of libraries to be installed on the cluster that executes the job. The default value is an empty list. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/ClusterState.md:
--------------------------------------------------------------------------------
# ClusterState

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/Continuous.md:
--------------------------------------------------------------------------------
# Continuous

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**pause_status** | Option<**String**> | Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
--------------------------------------------------------------------------------
/databricks-rust-jobs/docs/CronSchedule.md:
--------------------------------------------------------------------------------
# CronSchedule

## Properties

Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**quartz_cron_expression** | **String** | A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) for details. This field is required. |
**timezone_id** | **String** | A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone](https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html) for details. This field is required. |
**pause_status** | Option<**String**> | Indicate whether this schedule is paused or not. | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
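A sketch pairing a Quartz expression with its required timezone; the struct shape is assumed from the table above:

```rust
use databricks_rust_jobs::models::CronSchedule;

// "0 0 12 * * ?" is Quartz syntax for every day at 12:00 in the given zone.
fn daily_noon_utc() -> CronSchedule {
    CronSchedule {
        quartz_cron_expression: "0 0 12 * * ?".to_string(),
        timezone_id: "UTC".to_string(),
        pause_status: None,
    }
}
```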
-------------------------------------------------------------------------------- /databricks-rust-jobs/docs/DockerBasicAuth.md: -------------------------------------------------------------------------------- 1 | # DockerBasicAuth 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **username** | Option<**String**> | User name for the Docker repository. | [optional] 8 | **password** | Option<**String**> | Password for the Docker repository.
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/DockerImage.md: -------------------------------------------------------------------------------- 1 | # DockerImage 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **url** | Option<**String**> | URL for the Docker image. | [optional] 8 | **basic_auth** | Option<[**crate::models::DockerBasicAuth**](DockerBasicAuth.md)> | | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/Error.md: -------------------------------------------------------------------------------- 1 | # Error 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **error_code** | Option<**String**> | Error code | [optional] 8 | **message** | Option<**String**> | Human-readable error message that describes the cause of the error. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/EventDetails.md: -------------------------------------------------------------------------------- 1 | # EventDetails 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **current_num_workers** | Option<**i32**> | The number of nodes in the cluster. | [optional] 8 | **target_num_workers** | Option<**i32**> | The targeted number of nodes in the cluster. | [optional] 9 | **previous_attributes** | Option<[**crate::models::AwsAttributes**](AwsAttributes.md)> | | [optional] 10 | **attributes** | Option<[**crate::models::AwsAttributes**](AwsAttributes.md)> | | [optional] 11 | **previous_cluster_size** | Option<[**crate::models::ClusterSize**](ClusterSize.md)> | | [optional] 12 | **cluster_size** | Option<[**crate::models::ClusterSize**](ClusterSize.md)> | | [optional] 13 | **cause** | Option<[**crate::models::ResizeCause**](ResizeCause.md)> | | [optional] 14 | **reason** | Option<[**crate::models::TerminationReason**](TerminationReason.md)> | | [optional] 15 | **user** | Option<**String**> | The user that caused the event to occur. (Empty if it was done by Databricks.) | [optional] 16 | 17 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 18 | 19 | 20 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/FileStorageInfo.md: -------------------------------------------------------------------------------- 1 | # FileStorageInfo 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **destination** | Option<**String**> | File destination. 
Example: `file:/my/file.sh` | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/GitSnapshot.md: -------------------------------------------------------------------------------- 1 | # GitSnapshot 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **used_commit** | Option<**String**> | Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/GitSource.md: -------------------------------------------------------------------------------- 1 | # GitSource 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **git_url** | Option<**String**> | URL of the repository to be cloned by this job. The maximum length is 300 characters. | [optional] 8 | **git_provider** | Option<**String**> | Unique identifier of the service used to host the Git repository. The value is case insensitive. | [optional] 9 | **git_branch** | Option<**String**> | Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit. The maximum length is 255 characters. | [optional] 10 | **git_tag** | Option<**String**> | Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit. The maximum length is 255 characters. | [optional] 11 | **git_commit** | Option<**String**> | Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag. The maximum length is 64 characters. 
| [optional] 12 | **git_snapshot** | Option<[**crate::models::GitSnapshot**](GitSnapshot.md)> | | [optional] 13 | 14 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 15 | 16 | 17 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/InitScriptInfo.md: -------------------------------------------------------------------------------- 1 | # InitScriptInfo 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **dbfs** | Option<[**crate::models::DbfsStorageInfo**](DbfsStorageInfo.md)> | | [optional] 8 | **file** | Option<[**crate::models::FileStorageInfo**](FileStorageInfo.md)> | | [optional] 9 | **s3** | Option<[**crate::models::S3StorageInfo**](S3StorageInfo.md)> | | [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/IsOwner.md: -------------------------------------------------------------------------------- 1 | # IsOwner 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/Job.md: -------------------------------------------------------------------------------- 1 | # Job 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | Option<**i64**> | The canonical identifier for this job. | [optional] 8 | **creator_user_name** | Option<**String**> | The creator user name. This field won’t be included in the response if the user has already been deleted. | [optional] 9 | **settings** | Option<[**crate::models::JobSettings**](JobSettings.md)> | | [optional] 10 | **created_time** | Option<**i64**> | The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC). | [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobCluster.md: -------------------------------------------------------------------------------- 1 | # JobCluster 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_cluster_key** | **String** | A unique name for the job cluster. This field is required and must be unique within the job. `JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution. 
| 8 | **new_cluster** | Option<[**crate::models::NewCluster**](NewCluster.md)> | | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobSettingsQueue.md: -------------------------------------------------------------------------------- 1 | # JobSettingsQueue 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **enabled** | Option<**bool**> | | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobTask.md: -------------------------------------------------------------------------------- 1 | # JobTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **notebook_task** | Option<[**crate::models::NotebookTask**](NotebookTask.md)> | | [optional] 8 | **spark_jar_task** | Option<[**crate::models::SparkJarTask**](SparkJarTask.md)> | | [optional] 9 | **spark_python_task** | Option<[**crate::models::SparkPythonTask**](SparkPythonTask.md)> | | [optional] 10 | **spark_submit_task** | Option<[**crate::models::SparkSubmitTask**](SparkSubmitTask.md)> | | [optional] 11 | **pipeline_task** | Option<[**crate::models::PipelineTask**](PipelineTask.md)> | | [optional] 12 | **python_wheel_task** | Option<[**crate::models::PythonWheelTask**](PythonWheelTask.md)> | | [optional] 13 | **sql_task** | Option<[**crate::models::SqlTask**](SqlTask.md)> | | [optional] 14 | **dbt_task** | Option<[**crate::models::DbtTask**](DbtTask.md)> | | [optional] 15 | 16 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 17 | 18 | 19 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsCreate200Response.md: -------------------------------------------------------------------------------- 1 | # JobsCreate200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | Option<**i64**> | The canonical identifier for the newly created job. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsDeleteRequest.md: -------------------------------------------------------------------------------- 1 | # JobsDeleteRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | **i64** | The canonical identifier of the job to delete. This field is required. 
| 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsGet200Response.md: -------------------------------------------------------------------------------- 1 | # JobsGet200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | Option<**i64**> | The canonical identifier for this job. | [optional] 8 | **creator_user_name** | Option<**String**> | The creator user name. This field won’t be included in the response if the user has been deleted. | [optional] 9 | **run_as_user_name** | Option<**String**> | The user name that the job runs as. `run_as_user_name` is based on the current job settings, and is set to the creator of the job if job access control is disabled, or the `is_owner` permission if job access control is enabled. | [optional] 10 | **settings** | Option<[**crate::models::JobSettings**](JobSettings.md)> | | [optional] 11 | **created_time** | Option<**i64**> | The time at which this job was created in epoch milliseconds (milliseconds since 1/1/1970 UTC). | [optional] 12 | 13 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 14 | 15 | 16 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsList200Response.md: -------------------------------------------------------------------------------- 1 | # JobsList200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **jobs** | Option<[**Vec<crate::models::Job>**](Job.md)> | The list of jobs. | [optional] 8 | **has_more** | Option<**bool**> | | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsResetRequest.md: -------------------------------------------------------------------------------- 1 | # JobsResetRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | **i64** | The canonical identifier of the job to reset. This field is required. | 8 | **new_settings** | Option<[**crate::models::JobSettings**](JobSettings.md)> | | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunNow200Response.md: -------------------------------------------------------------------------------- 1 | # JobsRunNow200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **run_id** | Option<**i64**> | The globally unique ID of the newly triggered run. | [optional] 8 | **number_in_job** | Option<**i64**> | A unique identifier for this job run. This is set to the same value as `run_id`.
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 |
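As the `number_in_job` row above notes, both identifiers in a run-now response carry the same value. A minimal sketch of consuming such a response, under the same crate-name and serde-derive assumptions as before:

```rust
use databricks_rust_jobs::models::JobsRunNow200Response;

fn main() -> Result<(), serde_json::Error> {
    let body = r#"{ "run_id": 455644833, "number_in_job": 455644833 }"#;
    let response: JobsRunNow200Response = serde_json::from_str(body)?;
    // Per the model doc above, `number_in_job` mirrors `run_id`.
    assert_eq!(response.run_id, response.number_in_job);
    Ok(())
}
```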
-------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsCancelAllRequest.md: -------------------------------------------------------------------------------- 1 | # JobsRunsCancelAllRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | **i64** | The canonical identifier of the job to cancel all runs of. This field is required. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsCancelRequest.md: -------------------------------------------------------------------------------- 1 | # JobsRunsCancelRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **run_id** | **i64** | This field is required. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsDeleteRequest.md: -------------------------------------------------------------------------------- 1 | # JobsRunsDeleteRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **run_id** | Option<**i64**> | The canonical identifier of the run to delete. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsExport200Response.md: -------------------------------------------------------------------------------- 1 | # JobsRunsExport200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **views** | Option<[**Vec<crate::models::ViewItem>**](ViewItem.md)> | The exported content in HTML format (one for every view item). | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsList200Response.md: -------------------------------------------------------------------------------- 1 | # JobsRunsList200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **runs** | Option<[**Vec<crate::models::Run>**](Run.md)> | A list of runs, from most recently started to least recently started. | [optional] 8 | **has_more** | Option<**bool**> | If true, additional runs matching the provided filter are available for listing. 
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsRepair200Response.md: -------------------------------------------------------------------------------- 1 | # JobsRunsRepair200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **repair_id** | Option<**i64**> | The ID of the repair. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsRunsSubmit200Response.md: -------------------------------------------------------------------------------- 1 | # JobsRunsSubmit200Response 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **run_id** | Option<**i64**> | The canonical identifier for the newly submitted run. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/JobsUpdateRequest.md: -------------------------------------------------------------------------------- 1 | # JobsUpdateRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | **i64** | The canonical identifier of the job to update. This field is required. | 8 | **new_settings** | Option<[**crate::models::JobSettings**](JobSettings.md)> | | [optional] 9 | **fields_to_remove** | Option<**Vec<String>**> | Remove top-level fields in the job settings. Removing nested fields is not supported. This field is optional. | [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 |
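The update request above is a partial update: `new_settings` merges into the current job settings, while `fields_to_remove` deletes top-level keys (contrast with `JobsResetRequest`, which replaces the settings wholesale). A sketch that only removes a job's `schedule`; the job ID, crate name, and public struct fields are illustrative assumptions:

```rust
use databricks_rust_jobs::models::JobsUpdateRequest;

fn main() {
    // Drop the schedule, leave every other setting untouched.
    let request = JobsUpdateRequest {
        job_id: 11223344,
        new_settings: None,
        fields_to_remove: Some(vec!["schedule".to_string()]),
    };
    println!("{}", serde_json::to_string(&request).unwrap());
}
```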
-------------------------------------------------------------------------------- /databricks-rust-jobs/docs/LibraryFullStatus.md: -------------------------------------------------------------------------------- 1 | # LibraryFullStatus 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **library** | Option<[**crate::models::Library**](Library.md)> | | [optional] 8 | **status** | Option<[**crate::models::LibraryInstallStatus**](LibraryInstallStatus.md)> | | [optional] 9 | **messages** | Option<**Vec<String>**> | All the info and warning messages that have occurred so far for this library. | [optional] 10 | **is_library_for_all_clusters** | Option<**bool**> | Whether the library was set to be installed on all clusters via the libraries UI.
| [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/LibraryInstallStatus.md: -------------------------------------------------------------------------------- 1 | # LibraryInstallStatus 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/ListOrder.md: -------------------------------------------------------------------------------- 1 | # ListOrder 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/LogSyncStatus.md: -------------------------------------------------------------------------------- 1 | # LogSyncStatus 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **last_attempted** | Option<**i64**> | The timestamp of the last attempt. If the last attempt fails, last_exception contains the exception from the last attempt. | [optional] 8 | **last_exception** | Option<**String**> | The exception thrown in the last attempt; it is null (omitted in the response) if there was no exception in the last attempt. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/MavenLibrary.md: -------------------------------------------------------------------------------- 1 | # MavenLibrary 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **coordinates** | **String** | Gradle-style Maven coordinates. For example: `org.jsoup:jsoup:1.7.2`. This field is required. | 8 | **repo** | Option<**String**> | Maven repo to install the Maven package from. If omitted, both Maven Central Repository and Spark Packages are searched. | [optional] 9 | **exclusions** | Option<**Vec<String>**> | List of dependencies to exclude. For example: `[\"slf4j:slf4j\", \"*:hadoop-client\"]`. For more information, see the Maven documentation on dependency exclusions. 
| [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/NodeType.md: -------------------------------------------------------------------------------- 1 | # NodeType 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **node_type_id** | **String** | Unique identifier for this node type. This field is required. | 8 | **memory_mb** | **i32** | Memory (in MB) available for this node type. This field is required. | 9 | **num_cores** | Option<**f32**> | Number of CPU cores available for this node type. This can be fractional if the number of cores on a machine instance is not divisible by the number of Spark nodes on that machine. This field is required. | [optional] 10 | **description** | **String** | A string description associated with this node type. This field is required. | 11 | **instance_type_id** | **String** | An identifier for the type of hardware that this node runs on. This field is required. | 12 | **is_deprecated** | Option<**bool**> | Whether the node type is deprecated. Non-deprecated node types offer greater performance. | [optional] 13 | **node_info** | Option<[**crate::models::ClusterCloudProviderNodeInfo**](ClusterCloudProviderNodeInfo.md)> | | [optional] 14 | 15 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 16 | 17 | 18 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/NotebookOutput.md: -------------------------------------------------------------------------------- 1 | # NotebookOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **result** | Option<**String**> | The value passed to [dbutils.notebook.exit()](https://docs.databricks.com/notebooks/notebook-workflows.html#notebook-workflows-exit). Databricks restricts this API to return the first 5 MB of the value. For a larger result, your job can store the results in a cloud storage service. This field is absent if `dbutils.notebook.exit()` was never called. | [optional] 8 | **truncated** | Option<**bool**> | Whether or not the result was truncated. 
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/PermissionLevel.md: -------------------------------------------------------------------------------- 1 | # PermissionLevel 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/PermissionLevelForGroup.md: -------------------------------------------------------------------------------- 1 | # PermissionLevelForGroup 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/PipelineTask.md: -------------------------------------------------------------------------------- 1 | # PipelineTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **pipeline_id** | Option<**String**> | The full name of the pipeline task to execute. | [optional] 8 | **full_refresh** | Option<**bool**> | If true, a full refresh will be triggered on the delta live table. | [optional][default to false] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/PoolClusterTerminationCode.md: -------------------------------------------------------------------------------- 1 | # PoolClusterTerminationCode 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/PythonPyPiLibrary.md: -------------------------------------------------------------------------------- 1 | # PythonPyPiLibrary 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **package** | **String** | The name of the PyPI package to install. An optional exact version specification is also supported. Examples: `simplejson` and `simplejson==3.8.0`. This field is required. | 8 | **repo** | Option<**String**> | The repository where the package can be found. If not specified, the default pip index is used. 
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/PythonWheelTask.md: -------------------------------------------------------------------------------- 1 | # PythonWheelTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **package_name** | Option<**String**> | Name of the package to execute | [optional] 8 | **entry_point** | Option<**String**> | Named entry point to use. If it does not exist in the metadata of the package, the function is executed from the package directly using `$packageName.$entryPoint()` | [optional] 9 | **parameters** | Option<**Vec<String>**> | Command-line parameters passed to the Python wheel task. Leave it empty if `named_parameters` is not null. | [optional] 10 | **named_parameters** | Option<[**::std::collections::HashMap<String, serde_json::Value>**](serde_json::Value.md)> | Command-line parameters passed to the Python wheel task in the form of `[\"--name=task\", \"--data=dbfs:/path/to/data.json\"]`. Leave it empty if `parameters` is not null. | [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 |
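Note the either/or contract in `PythonWheelTask` above: `parameters` carries positional arguments, `named_parameters` carries keyword-style arguments, and only one of the two may be set. A sketch of the named form; the crate name, serde derives, and parameter values are assumptions:

```rust
use databricks_rust_jobs::models::PythonWheelTask;

fn main() -> Result<(), serde_json::Error> {
    let task: PythonWheelTask = serde_json::from_str(
        r#"{
            "package_name": "my_wheel",
            "entry_point": "run",
            "named_parameters": { "name": "task", "data": "dbfs:/path/to/data.json" }
        }"#,
    )?;
    // `parameters` must stay unset when `named_parameters` is used.
    assert!(task.parameters.is_none());
    Ok(())
}
```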
-------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RCranLibrary.md: -------------------------------------------------------------------------------- 1 | # RCranLibrary 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **package** | **String** | The name of the CRAN package to install. This field is required. | 8 | **repo** | Option<**String**> | The repository where the package can be found. If not specified, the default CRAN repo is used. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RepairHistory.md: -------------------------------------------------------------------------------- 1 | # RepairHistory 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **repair_history** | Option<[**Vec<crate::models::RepairHistoryItem>**](RepairHistoryItem.md)> | The repair history of the run. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RepairHistoryItem.md: -------------------------------------------------------------------------------- 1 | # RepairHistoryItem 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **r#type** | Option<**String**> | The repair history item type. Indicates whether a run is the original run or a repair run. | [optional] 8 | **start_time** | Option<**i64**> | The start time of the (repaired) run.
| [optional] 9 | **end_time** | Option<**i64**> | The end time of the (repaired) run. | [optional] 10 | **state** | Option<[**crate::models::RunState**](RunState.md)> | | [optional] 11 | **id** | Option<**i64**> | The ID of the repair. Only returned for the items that represent a repair in `repair_history`. | [optional] 12 | **task_run_ids** | Option<**Vec<i64>**> | The run IDs of the task runs that ran as part of this repair history item. | [optional] 13 | 14 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 15 | 16 | 17 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RepairRunInput.md: -------------------------------------------------------------------------------- 1 | # RepairRunInput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **run_id** | Option<**i64**> | The job run ID of the run to repair. The run must not be in progress. | [optional] 8 | **rerun_tasks** | Option<**Vec<String>**> | The task keys of the task runs to repair. | [optional] 9 | **latest_repair_id** | Option<**i64**> | The ID of the latest repair. This parameter is not required when repairing a run for the first time, but must be provided on subsequent requests to repair the same run. | [optional] 10 | **rerun_all_failed_tasks** | Option<**bool**> | If true, repair all failed tasks. Only one of rerun_tasks or rerun_all_failed_tasks can be used. | [optional][default to false] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/ResizeCause.md: -------------------------------------------------------------------------------- 1 | # ResizeCause 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RunLifeCycleState.md: -------------------------------------------------------------------------------- 1 | # RunLifeCycleState 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RunNowInput.md: -------------------------------------------------------------------------------- 1 | # RunNowInput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **job_id** | Option<**i64**> | The ID of the job to be executed. | [optional] 8 | **idempotency_token** | Option<**String**> | An optional token to guarantee the idempotency of job run requests.
If a run with the provided token already exists, the request does not create a new run but returns the ID of the existing run instead. If a run with the provided token is deleted, an error is returned. If you specify the idempotency token, upon failure you can retry until the request succeeds. Databricks guarantees that exactly one run is launched with that idempotency token. This token must have at most 64 characters. For more information, see [How to ensure idempotency for jobs](https://kb.databricks.com/jobs/jobs-idempotency.html). | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 |
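The `idempotency_token` described above is what makes run-now retries safe: resubmitting the same token returns the existing run's ID instead of launching a duplicate. A sketch of such a request, with the job ID and token purely illustrative and the crate name assumed as before:

```rust
use databricks_rust_jobs::models::RunNowInput;

fn main() {
    let request = RunNowInput {
        job_id: Some(11223344),
        // Safe to resend on timeout or network failure: Databricks launches
        // at most one run for this token.
        idempotency_token: Some("8f018174-4792-40d5-bcbc-3e6a527352c8".to_string()),
    };
    println!("{}", serde_json::to_string(&request).unwrap());
}
```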
-------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RunParametersPipelineParams.md: -------------------------------------------------------------------------------- 1 | # RunParametersPipelineParams 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **full_refresh** | Option<**bool**> | If true, triggers a full refresh on the delta live table. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RunResultState.md: -------------------------------------------------------------------------------- 1 | # RunResultState 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RunState.md: -------------------------------------------------------------------------------- 1 | # RunState 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **life_cycle_state** | Option<[**crate::models::RunLifeCycleState**](RunLifeCycleState.md)> | | [optional] 8 | **result_state** | Option<[**crate::models::RunResultState**](RunResultState.md)> | | [optional] 9 | **user_cancelled_or_timedout** | Option<**bool**> | Whether a run was canceled manually by a user or by the scheduler because the run timed out. | [optional] 10 | **state_message** | Option<**String**> | A descriptive message for the current state. This field is unstructured, and its exact format is subject to change. 
| [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/RunType.md: -------------------------------------------------------------------------------- 1 | # RunType 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SparkJarTask.md: -------------------------------------------------------------------------------- 1 | # SparkJarTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **main_class_name** | Option<**String**> | The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library. The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail. | [optional] 8 | **parameters** | Option<**Vec<String>**> | Parameters passed to the main method. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. | [optional] 9 | **jar_uri** | Option<**String**> | Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see [Create](https://docs.databricks.com/dev-tools/api/latest/jobs.html#operation/JobsCreate). | [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 |
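Tying the `SparkJarTask` fields above together: the class to run comes from `main_class_name`, the JAR itself is attached through the job's `libraries` field, and `jar_uri` is the deprecated path. A sketch under the usual crate-name and serde-derive assumptions, with illustrative class and argument values:

```rust
use databricks_rust_jobs::models::SparkJarTask;

fn main() -> Result<(), serde_json::Error> {
    let task: SparkJarTask = serde_json::from_str(
        r#"{
            "main_class_name": "com.example.jobs.Main",
            "parameters": ["--env", "staging"]
        }"#,
    )?;
    // The deprecated `jar_uri` stays unset; provide the JAR through `libraries`.
    assert!(task.jar_uri.is_none());
    Ok(())
}
```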
-------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SparkNode.md: -------------------------------------------------------------------------------- 1 | # SparkNode 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **private_ip** | Option<**String**> | Private IP address (typically a 10.x.x.x address) of the Spark node. This is different from the private IP address of the host instance. | [optional] 8 | **public_dns** | Option<**String**> | Public DNS address of this node. This address can be used to access the Spark JDBC server on the driver node. To communicate with the JDBC server, traffic must be manually authorized by adding security group rules to the “worker-unmanaged” security group via the AWS console. | [optional] 9 | **node_id** | Option<**String**> | Globally unique identifier for this node. | [optional] 10 | **instance_id** | Option<**String**> | Globally unique identifier for the host instance from the cloud provider. | [optional] 11 | **start_timestamp** | Option<**i64**> | The timestamp (in milliseconds) when the Spark node is launched. | [optional] 12 | **node_aws_attributes** | Option<[**crate::models::SparkNodeAwsAttributes**](SparkNodeAwsAttributes.md)> | | [optional] 13 | **host_private_ip** | Option<**String**> | The private IP address of the host instance.
| [optional] 14 | 15 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 16 | 17 | 18 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SparkNodeAwsAttributes.md: -------------------------------------------------------------------------------- 1 | # SparkNodeAwsAttributes 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **is_spot** | Option<**bool**> | Whether this node is on an Amazon spot instance. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SparkPythonTask.md: -------------------------------------------------------------------------------- 1 | # SparkPythonTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **python_file** | **String** | The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For Python files stored in the Databricks workspace, the path must be absolute and begin with `/`. This field is required. | 8 | **parameters** | Option<**Vec<String>**> | Command-line parameters passed to the Python file. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SparkSubmitTask.md: -------------------------------------------------------------------------------- 1 | # SparkSubmitTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **parameters** | Option<**Vec<String>**> | Command-line parameters passed to spark submit. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SparkVersion.md: -------------------------------------------------------------------------------- 1 | # SparkVersion 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **key** | Option<**String**> | [Databricks Runtime version](https://docs.databricks.com/dev-tools/api/latest/index.html#programmatic-version) key, for example `7.3.x-scala2.12`. This value must be provided as the `spark_version` when creating a new cluster. The exact runtime version may change over time for a “wildcard” version (that is, `7.3.x-scala2.12` is a “wildcard” version) with minor bug fixes. 
| [optional] 8 | **name** | Option<**String**> | A descriptive name for the runtime version, for example “Databricks Runtime 7.3 LTS”. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlAlertOutput.md: -------------------------------------------------------------------------------- 1 | # SqlAlertOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **query_text** | Option<**String**> | The text of the SQL query. Can Run permission of the SQL query associated with the SQL alert is required to view this field. | [optional] 8 | **warehouse_id** | Option<**String**> | The canonical identifier of the SQL warehouse. | [optional] 9 | **sql_statements** | Option<[**crate::models::SqlStatementOutput**](SqlStatementOutput.md)> | | [optional] 10 | **output_link** | Option<**String**> | The link to find the output results. | [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlDashboardOutput.md: -------------------------------------------------------------------------------- 1 | # SqlDashboardOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **widgets** | Option<[**crate::models::SqlDashboardWidgetOutput**](SqlDashboardWidgetOutput.md)> | | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlDashboardWidgetOutput.md: -------------------------------------------------------------------------------- 1 | # SqlDashboardWidgetOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **widget_id** | Option<**String**> | The canonical identifier of the SQL widget. | [optional] 8 | **widget_title** | Option<**String**> | The title of the SQL widget. | [optional] 9 | **output_link** | Option<**String**> | The link to find the output results. | [optional] 10 | **status** | Option<**String**> | The execution status of the SQL widget. | [optional] 11 | **error** | Option<[**crate::models::SqlOutputError**](SqlOutputError.md)> | | [optional] 12 | **start_time** | Option<**i64**> | Time (in epoch milliseconds) when execution of the SQL widget starts. | [optional] 13 | **end_time** | Option<**i64**> | Time (in epoch milliseconds) when execution of the SQL widget ends. 
| [optional] 14 | 15 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 16 | 17 | 18 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlOutput.md: -------------------------------------------------------------------------------- 1 | # SqlOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **query_output** | Option<[**crate::models::SqlQueryOutput**](SqlQueryOutput.md)> | | [optional] 8 | **dashboard_output** | Option<[**crate::models::SqlDashboardOutput**](SqlDashboardOutput.md)> | | [optional] 9 | **alert_output** | Option<[**crate::models::SqlAlertOutput**](SqlAlertOutput.md)> | | [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlOutputError.md: -------------------------------------------------------------------------------- 1 | # SqlOutputError 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **message** | Option<**String**> | The error message when execution fails. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlQueryOutput.md: -------------------------------------------------------------------------------- 1 | # SqlQueryOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **query_text** | Option<**String**> | The text of the SQL query. Can Run permission of the SQL query is required to view this field. | [optional] 8 | **warehouse_id** | Option<**String**> | The canonical identifier of the SQL warehouse. | [optional] 9 | **sql_statements** | Option<[**crate::models::SqlStatementOutput**](SqlStatementOutput.md)> | | [optional] 10 | **output_link** | Option<**String**> | The link to find the output results. | [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlStatementOutput.md: -------------------------------------------------------------------------------- 1 | # SqlStatementOutput 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **lookup_key** | Option<**String**> | A key that can be used to look up query details. 
| [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlTask.md: -------------------------------------------------------------------------------- 1 | # SqlTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **query** | Option<[**crate::models::SqlTaskQuery**](SqlTaskQuery.md)> | | [optional] 8 | **dashboard** | Option<[**crate::models::SqlTaskDashboard**](SqlTaskDashboard.md)> | | [optional] 9 | **alert** | Option<[**crate::models::SqlTaskAlert**](SqlTaskAlert.md)> | | [optional] 10 | **parameters** | Option<[**::std::collections::HashMap<String, serde_json::Value>**](serde_json::Value.md)> | Parameters to be used for each run of this job. The SQL alert task does not support custom parameters. | [optional] 11 | **warehouse_id** | **String** | The canonical identifier of the SQL warehouse. Only serverless and pro SQL warehouses are supported. | 12 | 13 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 14 | 15 | 16 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlTaskAlert.md: -------------------------------------------------------------------------------- 1 | # SqlTaskAlert 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **alert_id** | **String** | The canonical identifier of the SQL alert. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlTaskDashboard.md: -------------------------------------------------------------------------------- 1 | # SqlTaskDashboard 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **dashboard_id** | **String** | The canonical identifier of the SQL dashboard. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/SqlTaskQuery.md: -------------------------------------------------------------------------------- 1 | # SqlTaskQuery 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **query_id** | **String** | The canonical identifier of the SQL query.
| 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 |
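Per the `SqlTask` table above, a SQL task targets a `query`, a `dashboard`, or an `alert`, and `warehouse_id` is its only required field. A sketch of the query variant; the identifiers are illustrative, and the crate name and serde derives are assumed as before:

```rust
use databricks_rust_jobs::models::SqlTask;

fn main() -> Result<(), serde_json::Error> {
    let task: SqlTask = serde_json::from_str(
        r#"{
            "query": { "query_id": "6f1ee7e8-0a2b-4c3d-9e4f-5a6b7c8d9e0f" },
            "warehouse_id": "0123456789abcdef"
        }"#,
    )?;
    // The dashboard and alert variants stay unset for a query task.
    assert!(task.dashboard.is_none() && task.alert.is_none());
    Ok(())
}
```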
| 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/TaskDependenciesInner.md: -------------------------------------------------------------------------------- 1 | # TaskDependenciesInner 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **task_key** | Option<**String**> | | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/TaskSparkSubmitTask.md: -------------------------------------------------------------------------------- 1 | # TaskSparkSubmitTask 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **parameters** | Option<**Vec**> | Command-line parameters passed to spark submit. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/TerminationCode.md: -------------------------------------------------------------------------------- 1 | # TerminationCode 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/TerminationReason.md: -------------------------------------------------------------------------------- 1 | # TerminationReason 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **code** | Option<[**crate::models::TerminationCode**](TerminationCode.md)> | | [optional] 8 | **r#type** | Option<[**crate::models::TerminationType**](TerminationType.md)> | | [optional] 9 | **parameters** | Option<[**::std::collections::HashMap**](serde_json::Value.md)> | An object with additional information about why a cluster was terminated. The object keys are one of `TerminationParameter` and the value is the termination information. 
| [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/TerminationType.md: -------------------------------------------------------------------------------- 1 | # TerminationType 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/TriggerType.md: -------------------------------------------------------------------------------- 1 | # TriggerType 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/ViewItem.md: -------------------------------------------------------------------------------- 1 | # ViewItem 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **content** | Option<**String**> | Content of the view. | [optional] 8 | **name** | Option<**String**> | Name of the view item. In the case of code view, it would be the notebook’s name. In the case of dashboard view, it would be the dashboard’s name. 
| [optional] 9 | **r#type** | Option<[**crate::models::ViewType**](ViewType.md)> | | [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/ViewType.md: -------------------------------------------------------------------------------- 1 | # ViewType 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/ViewsToExport.md: -------------------------------------------------------------------------------- 1 | # ViewsToExport 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | 8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 9 | 10 | 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/WebhookNotifications.md: -------------------------------------------------------------------------------- 1 | # WebhookNotifications 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **on_start** | Option<[**Vec**](WebhookNotifications_on_start_inner.md)> | An optional list of notification IDs to call when the run starts. A maximum of 3 destinations can be specified for the `on_start` property. | [optional] 8 | **on_success** | Option<[**Vec**](WebhookNotifications_on_start_inner.md)> | An optional list of notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property. | [optional] 9 | **on_failure** | Option<[**Vec**](WebhookNotifications_on_start_inner.md)> | An optional list of notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property. 
| [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-jobs/docs/WebhookNotificationsOnStartInner.md: -------------------------------------------------------------------------------- 1 | # WebhookNotificationsOnStartInner 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **id** | Option<**String**> | | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde_derive; 3 | 4 | extern crate serde; 5 | extern crate serde_json; 6 | extern crate url; 7 | extern crate reqwest; 8 | 9 | pub mod apis; 10 | pub mod models; 11 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/access_control_list.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct AccessControlList { 17 | /// List of permissions to set on the job. 18 | #[serde(rename = "access_control_list", skip_serializing_if = "Option::is_none")] 19 | pub access_control_list: Option>, 20 | } 21 | 22 | impl AccessControlList { 23 | pub fn new() -> AccessControlList { 24 | AccessControlList { 25 | access_control_list: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/access_control_request_for_group.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 
6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct AccessControlRequestForGroup { 17 | /// Group name. There are two built-in groups: `users` for all users, and `admins` for administrators. 18 | #[serde(rename = "group_name", skip_serializing_if = "Option::is_none")] 19 | pub group_name: Option, 20 | #[serde(rename = "permission_level", skip_serializing_if = "Option::is_none")] 21 | pub permission_level: Option>, 22 | } 23 | 24 | impl AccessControlRequestForGroup { 25 | pub fn new() -> AccessControlRequestForGroup { 26 | AccessControlRequestForGroup { 27 | group_name: None, 28 | permission_level: None, 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/access_control_request_for_user.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct AccessControlRequestForUser { 17 | /// Email address for the user. 18 | #[serde(rename = "user_name", skip_serializing_if = "Option::is_none")] 19 | pub user_name: Option, 20 | #[serde(rename = "permission_level", skip_serializing_if = "Option::is_none")] 21 | pub permission_level: Option>, 22 | } 23 | 24 | impl AccessControlRequestForUser { 25 | pub fn new() -> AccessControlRequestForUser { 26 | AccessControlRequestForUser { 27 | user_name: None, 28 | permission_level: None, 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/can_manage.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// CanManage : Permission to manage the job. 13 | 14 | /// Permission to manage the job. 
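///
/// A serialization sketch (an editorial addition, not generator output; the
/// `databricks_rust_jobs` crate name is assumed from the package directory and
/// `serde_json` from the generated dependencies):
///
/// ```no_run
/// use databricks_rust_jobs::models::CanManage; // assumed import path
///
/// // The serde rename maps the variant to the wire value the Jobs API expects.
/// let level = CanManage::CanManage;
/// assert_eq!(serde_json::to_string(&level).unwrap(), "\"CAN_MANAGE\"");
/// ```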
15 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 16 | pub enum CanManage { 17 | #[serde(rename = "CAN_MANAGE")] 18 | CanManage, 19 | 20 | } 21 | 22 | impl ToString for CanManage { 23 | fn to_string(&self) -> String { 24 | match self { 25 | Self::CanManage => String::from("CAN_MANAGE"), 26 | } 27 | } 28 | } 29 | 30 | impl Default for CanManage { 31 | fn default() -> CanManage { 32 | Self::CanManage 33 | } 34 | } 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/can_manage_run.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// CanManageRun : Permission to run and/or manage runs for the job. 13 | 14 | /// Permission to run and/or manage runs for the job. 15 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 16 | pub enum CanManageRun { 17 | #[serde(rename = "CAN_MANAGE_RUN")] 18 | CanManageRun, 19 | 20 | } 21 | 22 | impl ToString for CanManageRun { 23 | fn to_string(&self) -> String { 24 | match self { 25 | Self::CanManageRun => String::from("CAN_MANAGE_RUN"), 26 | } 27 | } 28 | } 29 | 30 | impl Default for CanManageRun { 31 | fn default() -> CanManageRun { 32 | Self::CanManageRun 33 | } 34 | } 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/can_view.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// CanView : Permission to view the settings of the job. 13 | 14 | /// Permission to view the settings of the job. 
15 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 16 | pub enum CanView { 17 | #[serde(rename = "CAN_VIEW")] 18 | CanView, 19 | 20 | } 21 | 22 | impl ToString for CanView { 23 | fn to_string(&self) -> String { 24 | match self { 25 | Self::CanView => String::from("CAN_VIEW"), 26 | } 27 | } 28 | } 29 | 30 | impl Default for CanView { 31 | fn default() -> CanView { 32 | Self::CanView 33 | } 34 | } 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/cluster_library_statuses.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct ClusterLibraryStatuses { 17 | /// Unique identifier for the cluster. 18 | #[serde(rename = "cluster_id", skip_serializing_if = "Option::is_none")] 19 | pub cluster_id: Option, 20 | /// Status of all libraries on the cluster. 21 | #[serde(rename = "library_statuses", skip_serializing_if = "Option::is_none")] 22 | pub library_statuses: Option>, 23 | } 24 | 25 | impl ClusterLibraryStatuses { 26 | pub fn new() -> ClusterLibraryStatuses { 27 | ClusterLibraryStatuses { 28 | cluster_id: None, 29 | library_statuses: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/cluster_log_conf.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 
6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct ClusterLogConf { 17 | #[serde(rename = "dbfs", skip_serializing_if = "Option::is_none")] 18 | pub dbfs: Option>, 19 | #[serde(rename = "s3", skip_serializing_if = "Option::is_none")] 20 | pub s3: Option>, 21 | } 22 | 23 | impl ClusterLogConf { 24 | pub fn new() -> ClusterLogConf { 25 | ClusterLogConf { 26 | dbfs: None, 27 | s3: None, 28 | } 29 | } 30 | } 31 | 32 | 33 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/dbfs_storage_info.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct DbfsStorageInfo { 17 | /// DBFS destination. Example: `dbfs:/my/path` 18 | #[serde(rename = "destination", skip_serializing_if = "Option::is_none")] 19 | pub destination: Option, 20 | } 21 | 22 | impl DbfsStorageInfo { 23 | pub fn new() -> DbfsStorageInfo { 24 | DbfsStorageInfo { 25 | destination: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/docker_basic_auth.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct DockerBasicAuth { 17 | /// User name for the Docker repository. 18 | #[serde(rename = "username", skip_serializing_if = "Option::is_none")] 19 | pub username: Option, 20 | /// Password for the Docker repository. 
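///
/// A construction sketch (an editorial addition; the credential values are
/// placeholders and the import path is assumed):
///
/// ```no_run
/// use databricks_rust_jobs::models::DockerBasicAuth; // assumed import path
///
/// // new() starts with every field unset; populate only what you need.
/// let mut auth = DockerBasicAuth::new();
/// auth.username = Some("svc-user".to_string());
/// // Per the header note above, prefer a Databricks secret over a literal password.
/// auth.password = Some("example-password".to_string());
/// ```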
21 | #[serde(rename = "password", skip_serializing_if = "Option::is_none")] 22 | pub password: Option, 23 | } 24 | 25 | impl DockerBasicAuth { 26 | pub fn new() -> DockerBasicAuth { 27 | DockerBasicAuth { 28 | username: None, 29 | password: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/docker_image.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct DockerImage { 17 | /// URL for the Docker image. 18 | #[serde(rename = "url", skip_serializing_if = "Option::is_none")] 19 | pub url: Option, 20 | #[serde(rename = "basic_auth", skip_serializing_if = "Option::is_none")] 21 | pub basic_auth: Option>, 22 | } 23 | 24 | impl DockerImage { 25 | pub fn new() -> DockerImage { 26 | DockerImage { 27 | url: None, 28 | basic_auth: None, 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/error.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct Error { 17 | /// Error code 18 | #[serde(rename = "error_code", skip_serializing_if = "Option::is_none")] 19 | pub error_code: Option, 20 | /// Human-readable error message that describes the cause of the error. 21 | #[serde(rename = "message", skip_serializing_if = "Option::is_none")] 22 | pub message: Option, 23 | } 24 | 25 | impl Error { 26 | pub fn new() -> Error { 27 | Error { 28 | error_code: None, 29 | message: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/file_storage_info.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. 
Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct FileStorageInfo { 17 | /// File destination. Example: `file:/my/file.sh` 18 | #[serde(rename = "destination", skip_serializing_if = "Option::is_none")] 19 | pub destination: Option<String>, 20 | } 21 | 22 | impl FileStorageInfo { 23 | pub fn new() -> FileStorageInfo { 24 | FileStorageInfo { 25 | destination: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/init_script_info.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct InitScriptInfo { 17 | #[serde(rename = "dbfs", skip_serializing_if = "Option::is_none")] 18 | pub dbfs: Option<Box<crate::models::DbfsStorageInfo>>, 19 | #[serde(rename = "file", skip_serializing_if = "Option::is_none")] 20 | pub file: Option<Box<crate::models::FileStorageInfo>>, 21 | #[serde(rename = "s3", skip_serializing_if = "Option::is_none")] 22 | pub s3: Option<Box<crate::models::S3StorageInfo>>, 23 | } 24 | 25 | impl InitScriptInfo { 26 | pub fn new() -> InitScriptInfo { 27 | InitScriptInfo { 28 | dbfs: None, 29 | file: None, 30 | s3: None, 31 | } 32 | } 33 | } 34 | 35 | 36 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/is_owner.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// IsOwner : Permission that represents ownership of the job. 13 | 14 | /// Permission that represents ownership of the job.
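///
/// A quick sketch of the single-variant enum (an editorial addition; the import
/// path is assumed):
///
/// ```no_run
/// use databricks_rust_jobs::models::IsOwner; // assumed import path
///
/// // IS_OWNER is the only variant, so it is also the Default.
/// assert_eq!(IsOwner::default().to_string(), "IS_OWNER");
/// ```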
15 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 16 | pub enum IsOwner { 17 | #[serde(rename = "IS_OWNER")] 18 | IsOwner, 19 | 20 | } 21 | 22 | impl ToString for IsOwner { 23 | fn to_string(&self) -> String { 24 | match self { 25 | Self::IsOwner => String::from("IS_OWNER"), 26 | } 27 | } 28 | } 29 | 30 | impl Default for IsOwner { 31 | fn default() -> IsOwner { 32 | Self::IsOwner 33 | } 34 | } 35 | 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/job_cluster.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobCluster { 17 | /// A unique name for the job cluster. This field is required and must be unique within the job. `JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution. 18 | #[serde(rename = "job_cluster_key")] 19 | pub job_cluster_key: String, 20 | #[serde(rename = "new_cluster", skip_serializing_if = "Option::is_none")] 21 | pub new_cluster: Option>, 22 | } 23 | 24 | impl JobCluster { 25 | pub fn new(job_cluster_key: String) -> JobCluster { 26 | JobCluster { 27 | job_cluster_key, 28 | new_cluster: None, 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/job_settings_queue.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// JobSettingsQueue : The queue settings of the job. 13 | 14 | 15 | 16 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 17 | pub struct JobSettingsQueue { 18 | #[serde(rename = "enabled")] 19 | pub enabled: Option, 20 | } 21 | 22 | impl JobSettingsQueue { 23 | /// The queue settings of the job. 
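///
/// A sketch of the default behavior (an editorial addition; the import path is
/// assumed): both `new()` and `Default` emit queueing as explicitly disabled
/// (`Some(false)`) rather than leaving the flag unset.
///
/// ```no_run
/// use databricks_rust_jobs::models::JobSettingsQueue; // assumed import path
///
/// let queue = JobSettingsQueue::new();
/// assert_eq!(queue.enabled, Some(false));
///
/// // Opt in to run queueing by overwriting the flag.
/// let queue = JobSettingsQueue { enabled: Some(true) };
/// assert_eq!(queue.enabled, Some(true));
/// ```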
24 | pub fn new() -> JobSettingsQueue { 25 | JobSettingsQueue { 26 | enabled: Some(false), 27 | } 28 | } 29 | } 30 | 31 | impl Default for JobSettingsQueue { 32 | fn default() -> Self { 33 | JobSettingsQueue { 34 | enabled: Some(false), 35 | } 36 | } 37 | } 38 | 39 | 40 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_create_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsCreate200Response { 17 | /// The canonical identifier for the newly created job. 18 | #[serde(rename = "job_id", skip_serializing_if = "Option::is_none")] 19 | pub job_id: Option, 20 | } 21 | 22 | impl JobsCreate200Response { 23 | pub fn new() -> JobsCreate200Response { 24 | JobsCreate200Response { 25 | job_id: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_delete_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsDeleteRequest { 17 | /// The canonical identifier of the job to delete. This field is required. 18 | #[serde(rename = "job_id")] 19 | pub job_id: i64, 20 | } 21 | 22 | impl JobsDeleteRequest { 23 | pub fn new(job_id: i64) -> JobsDeleteRequest { 24 | JobsDeleteRequest { 25 | job_id, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_list_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). 
Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsList200Response { 17 | /// The list of jobs. 18 | #[serde(rename = "jobs", skip_serializing_if = "Option::is_none")] 19 | pub jobs: Option>, 20 | #[serde(rename = "has_more", skip_serializing_if = "Option::is_none")] 21 | pub has_more: Option, 22 | } 23 | 24 | impl JobsList200Response { 25 | pub fn new() -> JobsList200Response { 26 | JobsList200Response { 27 | jobs: None, 28 | has_more: None, 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_reset_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsResetRequest { 17 | /// The canonical identifier of the job to reset. This field is required. 18 | #[serde(rename = "job_id")] 19 | pub job_id: i64, 20 | #[serde(rename = "new_settings", skip_serializing_if = "Option::is_none")] 21 | pub new_settings: Option>, 22 | } 23 | 24 | impl JobsResetRequest { 25 | pub fn new(job_id: i64) -> JobsResetRequest { 26 | JobsResetRequest { 27 | job_id, 28 | new_settings: None, 29 | } 30 | } 31 | } 32 | 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_run_now_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunNow200Response { 17 | /// The globally unique ID of the newly triggered run. 18 | #[serde(rename = "run_id", skip_serializing_if = "Option::is_none")] 19 | pub run_id: Option, 20 | /// A unique identifier for this job run. This is set to the same value as `run_id`. 
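///
/// A deserialization sketch (an editorial addition; the JSON literal and import
/// path are illustrative assumptions):
///
/// ```no_run
/// use databricks_rust_jobs::models::JobsRunNow200Response; // assumed import path
///
/// let body = r#"{"run_id": 455644833, "number_in_job": 455644833}"#;
/// let resp: JobsRunNow200Response = serde_json::from_str(body).unwrap();
/// // The API documents number_in_job as mirroring run_id.
/// assert_eq!(resp.run_id, resp.number_in_job);
/// ```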
21 | #[serde(rename = "number_in_job", skip_serializing_if = "Option::is_none")] 22 | pub number_in_job: Option, 23 | } 24 | 25 | impl JobsRunNow200Response { 26 | pub fn new() -> JobsRunNow200Response { 27 | JobsRunNow200Response { 28 | run_id: None, 29 | number_in_job: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_cancel_all_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsCancelAllRequest { 17 | /// The canonical identifier of the job to cancel all runs of. This field is required. 18 | #[serde(rename = "job_id")] 19 | pub job_id: i64, 20 | } 21 | 22 | impl JobsRunsCancelAllRequest { 23 | pub fn new(job_id: i64) -> JobsRunsCancelAllRequest { 24 | JobsRunsCancelAllRequest { 25 | job_id, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_cancel_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsCancelRequest { 17 | /// This field is required. 18 | #[serde(rename = "run_id")] 19 | pub run_id: i64, 20 | } 21 | 22 | impl JobsRunsCancelRequest { 23 | pub fn new(run_id: i64) -> JobsRunsCancelRequest { 24 | JobsRunsCancelRequest { 25 | run_id, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_delete_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). 
Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsDeleteRequest { 17 | /// The canonical identifier of the run to delete. 18 | #[serde(rename = "run_id", skip_serializing_if = "Option::is_none")] 19 | pub run_id: Option<i64>, 20 | } 21 | 22 | impl JobsRunsDeleteRequest { 23 | pub fn new() -> JobsRunsDeleteRequest { 24 | JobsRunsDeleteRequest { 25 | run_id: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_export_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsExport200Response { 17 | /// The exported content in HTML format (one for every view item). 18 | #[serde(rename = "views", skip_serializing_if = "Option::is_none")] 19 | pub views: Option<Vec<crate::models::ViewItem>>, 20 | } 21 | 22 | impl JobsRunsExport200Response { 23 | pub fn new() -> JobsRunsExport200Response { 24 | JobsRunsExport200Response { 25 | views: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_list_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsList200Response { 17 | /// A list of runs, from most recently started to least. 18 | #[serde(rename = "runs", skip_serializing_if = "Option::is_none")] 19 | pub runs: Option<Vec<crate::models::Run>>, 20 | /// If true, additional runs matching the provided filter are available for listing.
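///
/// A pagination sketch (an editorial addition; the JSON literal and import path
/// are illustrative assumptions):
///
/// ```no_run
/// use databricks_rust_jobs::models::JobsRunsList200Response; // assumed import path
///
/// let body = r#"{"runs": [], "has_more": true}"#;
/// let page: JobsRunsList200Response = serde_json::from_str(body).unwrap();
/// if page.has_more == Some(true) {
///     // Fetch the next page by advancing the offset/limit query parameters
///     // on the runs/list endpoint (see the DefaultApi docs for the exact call).
/// }
/// ```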
21 | #[serde(rename = "has_more", skip_serializing_if = "Option::is_none")] 22 | pub has_more: Option, 23 | } 24 | 25 | impl JobsRunsList200Response { 26 | pub fn new() -> JobsRunsList200Response { 27 | JobsRunsList200Response { 28 | runs: None, 29 | has_more: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_repair_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsRepair200Response { 17 | /// The ID of the repair. 18 | #[serde(rename = "repair_id", skip_serializing_if = "Option::is_none")] 19 | pub repair_id: Option, 20 | } 21 | 22 | impl JobsRunsRepair200Response { 23 | pub fn new() -> JobsRunsRepair200Response { 24 | JobsRunsRepair200Response { 25 | repair_id: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/jobs_runs_submit_200_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct JobsRunsSubmit200Response { 17 | /// The canonical identifier for the newly submitted run. 18 | #[serde(rename = "run_id", skip_serializing_if = "Option::is_none")] 19 | pub run_id: Option, 20 | } 21 | 22 | impl JobsRunsSubmit200Response { 23 | pub fn new() -> JobsRunsSubmit200Response { 24 | JobsRunsSubmit200Response { 25 | run_id: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/list_order.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. 
Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// ListOrder : * `DESC`: Descending order. * `ASC`: Ascending order. 13 | 14 | /// * `DESC`: Descending order. * `ASC`: Ascending order. 15 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 16 | pub enum ListOrder { 17 | #[serde(rename = "DESC")] 18 | Desc, 19 | #[serde(rename = "ASC")] 20 | Asc, 21 | 22 | } 23 | 24 | impl ToString for ListOrder { 25 | fn to_string(&self) -> String { 26 | match self { 27 | Self::Desc => String::from("DESC"), 28 | Self::Asc => String::from("ASC"), 29 | } 30 | } 31 | } 32 | 33 | impl Default for ListOrder { 34 | fn default() -> ListOrder { 35 | Self::Desc 36 | } 37 | } 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/permission_level.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// PermissionLevel : Permission level to grant. 13 | 14 | 15 | 16 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 17 | pub struct PermissionLevel { 18 | } 19 | 20 | impl PermissionLevel { 21 | /// Permission level to grant. 22 | pub fn new() -> PermissionLevel { 23 | PermissionLevel { 24 | } 25 | } 26 | } 27 | 28 | 29 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/permission_level_for_group.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// PermissionLevelForGroup : Permission level to grant. 13 | 14 | 15 | 16 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 17 | pub struct PermissionLevelForGroup { 18 | } 19 | 20 | impl PermissionLevelForGroup { 21 | /// Permission level to grant. 
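///
/// A sketch of the (intentionally empty) schema (an editorial addition; the
/// import path is assumed): the generator emits this type without fields, so
/// it serializes to an empty JSON object.
///
/// ```no_run
/// use databricks_rust_jobs::models::PermissionLevelForGroup; // assumed import path
///
/// let level = PermissionLevelForGroup::new();
/// assert_eq!(serde_json::to_string(&level).unwrap(), "{}");
/// ```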
22 | pub fn new() -> PermissionLevelForGroup { 23 | PermissionLevelForGroup { 24 | } 25 | } 26 | } 27 | 28 | 29 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/pipeline_task.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct PipelineTask { 17 | /// The full name of the pipeline task to execute. 18 | #[serde(rename = "pipeline_id", skip_serializing_if = "Option::is_none")] 19 | pub pipeline_id: Option, 20 | /// If true, a full refresh will be triggered on the delta live table. 21 | #[serde(rename = "full_refresh", skip_serializing_if = "Option::is_none")] 22 | pub full_refresh: Option, 23 | } 24 | 25 | impl PipelineTask { 26 | pub fn new() -> PipelineTask { 27 | PipelineTask { 28 | pipeline_id: None, 29 | full_refresh: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/python_py_pi_library.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct PythonPyPiLibrary { 17 | /// The name of the PyPI package to install. An optional exact version specification is also supported. Examples: `simplejson` and `simplejson==3.8.0`. This field is required. 18 | #[serde(rename = "package")] 19 | pub package: String, 20 | /// The repository where the package can be found. If not specified, the default pip index is used. 
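///
/// A construction sketch (an editorial addition; the package pin and index URL
/// are placeholders, and the import path is assumed):
///
/// ```no_run
/// use databricks_rust_jobs::models::PythonPyPiLibrary; // assumed import path
///
/// // Pin an exact version and point at a private index instead of the default pip index.
/// let mut lib = PythonPyPiLibrary::new("simplejson==3.8.0".to_string());
/// lib.repo = Some("https://pypi.example.com/simple".to_string());
/// ```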
21 | #[serde(rename = "repo", skip_serializing_if = "Option::is_none")] 22 | pub repo: Option, 23 | } 24 | 25 | impl PythonPyPiLibrary { 26 | pub fn new(package: String) -> PythonPyPiLibrary { 27 | PythonPyPiLibrary { 28 | package, 29 | repo: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/r_cran_library.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct RCranLibrary { 17 | /// The name of the CRAN package to install. This field is required. 18 | #[serde(rename = "package")] 19 | pub package: String, 20 | /// The repository where the package can be found. If not specified, the default CRAN repo is used. 21 | #[serde(rename = "repo", skip_serializing_if = "Option::is_none")] 22 | pub repo: Option, 23 | } 24 | 25 | impl RCranLibrary { 26 | pub fn new(package: String) -> RCranLibrary { 27 | RCranLibrary { 28 | package, 29 | repo: None, 30 | } 31 | } 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/repair_history.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct RepairHistory { 17 | /// The repair history of the run. 18 | #[serde(rename = "repair_history", skip_serializing_if = "Option::is_none")] 19 | pub repair_history: Option>, 20 | } 21 | 22 | impl RepairHistory { 23 | pub fn new() -> RepairHistory { 24 | RepairHistory { 25 | repair_history: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/run_parameters_pipeline_params.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. 
Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct RunParametersPipelineParams { 17 | /// If true, triggers a full refresh on the delta live table. 18 | #[serde(rename = "full_refresh", skip_serializing_if = "Option::is_none")] 19 | pub full_refresh: Option<bool>, 20 | } 21 | 22 | impl RunParametersPipelineParams { 23 | pub fn new() -> RunParametersPipelineParams { 24 | RunParametersPipelineParams { 25 | full_refresh: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/spark_node_aws_attributes.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SparkNodeAwsAttributes { 17 | /// Whether this node is on an Amazon spot instance. 18 | #[serde(rename = "is_spot", skip_serializing_if = "Option::is_none")] 19 | pub is_spot: Option<bool>, 20 | } 21 | 22 | impl SparkNodeAwsAttributes { 23 | pub fn new() -> SparkNodeAwsAttributes { 24 | SparkNodeAwsAttributes { 25 | is_spot: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/spark_submit_task.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SparkSubmitTask { 17 | /// Command-line parameters passed to spark submit. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. 18 | #[serde(rename = "parameters", skip_serializing_if = "Option::is_none")] 19 | pub parameters: Option<Vec<String>>, 20 | } 21 | 22 | impl SparkSubmitTask { 23 | pub fn new() -> SparkSubmitTask { 24 | SparkSubmitTask { 25 | parameters: None, 26 | } 27 | } 28 | } 29 | 30 | 31 |
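Usage note (not part of the generated crate): a sketch of populating `SparkSubmitTask.parameters`, assuming the `databricks_rust_jobs` crate as above. The class and jar names are illustrative placeholders; `{{job_id}}` is one of the documented Databricks task parameter variables expanded at run time.

```rust
use databricks_rust_jobs::models::SparkSubmitTask;

fn main() {
    let mut task = SparkSubmitTask::new();
    // Parameters are passed verbatim to spark-submit, one argument per element.
    task.parameters = Some(vec![
        "--class".to_string(),
        "org.apache.spark.examples.SparkPi".to_string(), // placeholder class
        "dbfs:/jars/example.jar".to_string(),            // placeholder jar path
        "{{job_id}}".to_string(),                        // expanded by Databricks
    ]);
    println!("{}", serde_json::to_string_pretty(&task).unwrap());
}
```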
-------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_dashboard_output.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlDashboardOutput { 17 | #[serde(rename = "widgets", skip_serializing_if = "Option::is_none")] 18 | pub widgets: Option<Vec<crate::models::SqlDashboardWidgetOutput>>, 19 | } 20 | 21 | impl SqlDashboardOutput { 22 | pub fn new() -> SqlDashboardOutput { 23 | SqlDashboardOutput { 24 | widgets: None, 25 | } 26 | } 27 | } 28 | 29 | 30 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_output.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlOutput { 17 | #[serde(rename = "query_output", skip_serializing_if = "Option::is_none")] 18 | pub query_output: Option<Box<crate::models::SqlQueryOutput>>, 19 | #[serde(rename = "dashboard_output", skip_serializing_if = "Option::is_none")] 20 | pub dashboard_output: Option<Box<crate::models::SqlDashboardOutput>>, 21 | #[serde(rename = "alert_output", skip_serializing_if = "Option::is_none")] 22 | pub alert_output: Option<Box<crate::models::SqlAlertOutput>>, 23 | } 24 | 25 | impl SqlOutput { 26 | pub fn new() -> SqlOutput { 27 | SqlOutput { 28 | query_output: None, 29 | dashboard_output: None, 30 | alert_output: None, 31 | } 32 | } 33 | } 34 | 35 | 36 |
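Usage note (not part of the generated crate): `SqlOutput` models a union where, per the SQL task semantics, at most one of the three outputs is expected to be populated. A hypothetical helper sketch for discriminating the variants, assuming the boxed field types restored above:

```rust
use databricks_rust_jobs::models::SqlOutput;

// Hypothetical helper: report which kind of SQL output a run produced.
fn describe(output: &SqlOutput) -> &'static str {
    match (&output.query_output, &output.dashboard_output, &output.alert_output) {
        (Some(_), None, None) => "query",
        (None, Some(_), None) => "dashboard",
        (None, None, Some(_)) => "alert",
        _ => "empty or ambiguous",
    }
}
```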
-------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_output_error.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlOutputError { 17 | /// The error message when execution fails. 18 | #[serde(rename = "message", skip_serializing_if = "Option::is_none")] 19 | pub message: Option<String>, 20 | } 21 | 22 | impl SqlOutputError { 23 | pub fn new() -> SqlOutputError { 24 | SqlOutputError { 25 | message: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_statement_output.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlStatementOutput { 17 | /// A key that can be used to look up query details. 18 | #[serde(rename = "lookup_key", skip_serializing_if = "Option::is_none")] 19 | pub lookup_key: Option<String>, 20 | } 21 | 22 | impl SqlStatementOutput { 23 | pub fn new() -> SqlStatementOutput { 24 | SqlStatementOutput { 25 | lookup_key: None, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_task_alert.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlTaskAlert { 17 | /// The canonical identifier of the SQL alert.
18 | #[serde(rename = "alert_id")] 19 | pub alert_id: String, 20 | } 21 | 22 | impl SqlTaskAlert { 23 | pub fn new(alert_id: String) -> SqlTaskAlert { 24 | SqlTaskAlert { 25 | alert_id, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_task_dashboard.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlTaskDashboard { 17 | /// The canonical identifier of the SQL dashboard. 18 | #[serde(rename = "dashboard_id")] 19 | pub dashboard_id: String, 20 | } 21 | 22 | impl SqlTaskDashboard { 23 | pub fn new(dashboard_id: String) -> SqlTaskDashboard { 24 | SqlTaskDashboard { 25 | dashboard_id, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/sql_task_query.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct SqlTaskQuery { 17 | /// The canonical identifier of the SQL query. 18 | #[serde(rename = "query_id")] 19 | pub query_id: String, 20 | } 21 | 22 | impl SqlTaskQuery { 23 | pub fn new(query_id: String) -> SqlTaskQuery { 24 | SqlTaskQuery { 25 | query_id, 26 | } 27 | } 28 | } 29 | 30 | 31 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/task_dependencies_inner.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 
6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct TaskDependenciesInner { 17 | #[serde(rename = "task_key", skip_serializing_if = "Option::is_none")] 18 | pub task_key: Option<String>, 19 | } 20 | 21 | impl TaskDependenciesInner { 22 | pub fn new() -> TaskDependenciesInner { 23 | TaskDependenciesInner { 24 | task_key: None, 25 | } 26 | } 27 | } 28 | 29 | 30 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/view_type.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | /// ViewType : * `NOTEBOOK`: Notebook view item. * `DASHBOARD`: Dashboard view item. 13 | 14 | /// * `NOTEBOOK`: Notebook view item. * `DASHBOARD`: Dashboard view item. 15 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 16 | pub enum ViewType { 17 | #[serde(rename = "NOTEBOOK")] 18 | Notebook, 19 | #[serde(rename = "DASHBOARD")] 20 | Dashboard, 21 | 22 | } 23 | 24 | impl ToString for ViewType { 25 | fn to_string(&self) -> String { 26 | match self { 27 | Self::Notebook => String::from("NOTEBOOK"), 28 | Self::Dashboard => String::from("DASHBOARD"), 29 | } 30 | } 31 | } 32 | 33 | impl Default for ViewType { 34 | fn default() -> ViewType { 35 | Self::Notebook 36 | } 37 | } 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /databricks-rust-jobs/src/models/webhook_notifications_on_start_inner.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Jobs API 2.1 4 | * 5 | * The Jobs API allows you to create, edit, and delete jobs. You should never hard code secrets or store them in plain text. Use the [Secrets API](https://docs.databricks.com/dev-tools/api/latest/secrets.html) to manage secrets in the [Databricks CLI](https://docs.databricks.com/dev-tools/cli/index.html). Use the [Secrets utility](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets) to reference secrets in notebooks and jobs. 6 | * 7 | * The version of the OpenAPI document: 2.1 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | 13 | 14 | 15 | #[derive(JsonSchema, Clone, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WebhookNotificationsOnStartInner { 17 | #[serde(rename = "id", skip_serializing_if = "Option::is_none")] 18 | pub id: Option<String>, 19 | } 20 | 21 | impl WebhookNotificationsOnStartInner { 22 | pub fn new() -> WebhookNotificationsOnStartInner { 23 | WebhookNotificationsOnStartInner { 24 | id: None, 25 | } 26 | } 27 | } 28 | 29 | 30 |
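Usage note (not part of the generated crate): the serde renames on enums like `ViewType` above map Rust variants to the API's wire strings, so a round-trip through `serde_json` is a quick sanity check. A minimal sketch, assuming the same crate dependencies as before:

```rust
use databricks_rust_jobs::models::ViewType;

fn main() {
    // Serialization emits the renamed wire string, not the Rust identifier.
    let json = serde_json::to_string(&ViewType::Dashboard).unwrap();
    assert_eq!(json, "\"DASHBOARD\"");

    // Deserialization accepts the wire string back.
    let parsed: ViewType = serde_json::from_str("\"NOTEBOOK\"").unwrap();
    assert_eq!(parsed, ViewType::Notebook);

    // The generated Default picks the first variant.
    assert_eq!(ViewType::default(), ViewType::Notebook);
}
```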
-------------------------------------------------------------------------------- /databricks-rust-repos/.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /databricks-rust-repos/.openapi-generator-ignore: -------------------------------------------------------------------------------- 1 | # OpenAPI Generator Ignore 2 | # Generated by openapi-generator https://github.com/openapitools/openapi-generator 3 | 4 | # Use this file to prevent files from being overwritten by the generator. 5 | # The patterns follow closely to .gitignore or .dockerignore. 6 | 7 | # As an example, the C# client generator defines ApiClient.cs. 8 | # You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: 9 | #ApiClient.cs 10 | 11 | # You can match any string of characters against a directory, file or extension with a single asterisk (*): 12 | #foo/*/qux 13 | # The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux 14 | 15 | # You can recursively match patterns against a directory, file or extension with a double asterisk (**): 16 | #foo/**/qux 17 | # This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux 18 | 19 | # You can also negate patterns with an exclamation (!).
20 | # For example, you can ignore all files in a docs folder with the file extension .md: 21 | #docs/*.md 22 | # Then explicitly reverse the ignore rule for a single file: 23 | #!docs/README.md 24 | -------------------------------------------------------------------------------- /databricks-rust-repos/.openapi-generator/FILES: -------------------------------------------------------------------------------- 1 | .gitignore 2 | .openapi-generator-ignore 3 | .travis.yml 4 | Cargo.toml 5 | README.md 6 | docs/Branch.md 7 | docs/CreateRepoRequest.md 8 | docs/DefaultApi.md 9 | docs/Error.md 10 | docs/GetRepoResponse.md 11 | docs/GetReposResponse.md 12 | docs/Tag.md 13 | docs/UpdateRepoRequest.md 14 | git_push.sh 15 | src/apis/configuration.rs 16 | src/apis/default_api.rs 17 | src/apis/mod.rs 18 | src/lib.rs 19 | src/models/branch.rs 20 | src/models/create_repo_request.rs 21 | src/models/error.rs 22 | src/models/get_repo_response.rs 23 | src/models/get_repos_response.rs 24 | src/models/mod.rs 25 | src/models/tag.rs 26 | src/models/update_repo_request.rs 27 | -------------------------------------------------------------------------------- /databricks-rust-repos/.openapi-generator/VERSION: -------------------------------------------------------------------------------- 1 | 6.2.0 -------------------------------------------------------------------------------- /databricks-rust-repos/.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | -------------------------------------------------------------------------------- /databricks-rust-repos/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databricks_rust_repos" 3 | version = "2.0.1" 4 | authors = ["OpenAPI Generator team and contributors"] 5 | edition = "2018" 6 | 7 | [dependencies] 8 | schemars = "0.8.11" 9 | serde = { version = "^1.0", features = ["derive"] } 10 | serde_derive = "^1.0" 11 | serde_with = "^2.0" 12 | serde_json = "^1.0" 13 | url = "^2.5" 14 | uuid = { version = "^1.8", features = ["serde", "v4"] } 15 | reqwest = { version = "^0.12", features = ["json", "multipart"] } 16 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/Branch.md: -------------------------------------------------------------------------------- 1 | # Branch 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **branch** | **String** | Branch that the local version of the repo is checked out to. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/CreateRepoRequest.md: -------------------------------------------------------------------------------- 1 | # CreateRepoRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **url** | **String** | URL of the Git repository to be linked. | 8 | **provider** | **String** | Git provider. This field is case-insensitive. The available Git providers are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit. 
| 9 | **path** | Option<**String**> | Desired path for the repo in the workspace. Must be in the format /Repos/{folder}/{repo-name}. | [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/Error.md: -------------------------------------------------------------------------------- 1 | # Error 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **error_code** | Option<**String**> | Error code | [optional] 8 | **message** | Option<**String**> | Human-readable error message describing the cause of the error. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/GetRepoResponse.md: -------------------------------------------------------------------------------- 1 | # GetRepoResponse 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **id** | Option<**i64**> | ID of the repo object in the workspace. | [optional] 8 | **url** | Option<**String**> | URL of the Git repository to be linked. | [optional] 9 | **provider** | Option<**String**> | Git provider. This field is case-insensitive. The available Git providers are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit. | [optional] 10 | **path** | Option<**String**> | Desired path for the repo in the workspace. Must be in the format /Repos/{folder}/{repo-name}. | [optional] 11 | **branch** | Option<**String**> | Branch that the local version of the repo is checked out to. | [optional] 12 | **head_commit_id** | Option<**String**> | SHA-1 hash representing the commit ID of the current HEAD of the repo. | [optional] 13 | 14 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 15 | 16 | 17 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/GetReposResponse.md: -------------------------------------------------------------------------------- 1 | # GetReposResponse 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **repos** | Option<[**Vec**](GetRepoResponse.md)> | | [optional] 8 | **next_page_token** | Option<**String**> | Token that can be specified as a query parameter to the GET /repos endpoint to retrieve the next page of results. 
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/README.md: -------------------------------------------------------------------------------- 1 | # docs 2 | 3 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/Tag.md: -------------------------------------------------------------------------------- 1 | # Tag 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **tag** | **String** | Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-repos/docs/UpdateRepoRequest.md: -------------------------------------------------------------------------------- 1 | # UpdateRepoRequest 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **branch** | **String** | Branch that the local version of the repo is checked out to. | 8 | **tag** | **String** | Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. | 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde_derive; 3 | 4 | extern crate reqwest; 5 | extern crate serde; 6 | extern crate serde_json; 7 | extern crate url; 8 | 9 | pub mod apis; 10 | pub mod models; 11 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/models/branch.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Repos API 4 | * 5 | * The repos API allows users to manage their [repos](https://docs.databricks.com/repos.html). Users can use the API to access all repos that they have manage permissions on. 6 | * 7 | * The version of the OpenAPI document: 2.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | #[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)] 13 | pub struct Branch { 14 | /// Branch that the local version of the repo is checked out to. 
15 | #[serde(rename = "branch")] 16 | pub branch: String, 17 | } 18 | 19 | impl Branch { 20 | pub fn new(branch: String) -> Branch { 21 | Branch { branch } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/models/create_repo_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Repos API 4 | * 5 | * The repos API allows users to manage their [repos](https://docs.databricks.com/repos.html). Users can use the API to access all repos that they have manage permissions on. 6 | * 7 | * The version of the OpenAPI document: 2.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | #[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)] 13 | pub struct CreateRepoRequest { 14 | /// URL of the Git repository to be linked. 15 | #[serde(rename = "url")] 16 | pub url: String, 17 | /// Git provider. This field is case-insensitive. The available Git providers are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit. 18 | #[serde(rename = "provider")] 19 | pub provider: String, 20 | /// Desired path for the repo in the workspace. Must be in the format /Repos/{folder}/{repo-name}. 21 | #[serde(rename = "path", skip_serializing_if = "Option::is_none")] 22 | pub path: Option<String>, 23 | } 24 | 25 | impl CreateRepoRequest { 26 | pub fn new(url: String, provider: String) -> CreateRepoRequest { 27 | CreateRepoRequest { 28 | url, 29 | provider, 30 | path: None, 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/models/error.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Repos API 4 | * 5 | * The repos API allows users to manage their [repos](https://docs.databricks.com/repos.html). Users can use the API to access all repos that they have manage permissions on. 6 | * 7 | * The version of the OpenAPI document: 2.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | #[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)] 13 | pub struct Error { 14 | /// Error code 15 | #[serde(rename = "error_code", skip_serializing_if = "Option::is_none")] 16 | pub error_code: Option<String>, 17 | /// Human-readable error message describing the cause of the error. 18 | #[serde(rename = "message", skip_serializing_if = "Option::is_none")] 19 | pub message: Option<String>, 20 | } 21 | 22 | impl Error { 23 | pub fn new() -> Error { 24 | Error { 25 | error_code: None, 26 | message: None, 27 | } 28 | } 29 | } 30 |
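Usage note (not part of the generated crate): a minimal sketch of building a `CreateRepoRequest`, assuming `databricks_rust_repos` and `serde_json` in the caller's dependencies. The repository URL and workspace path are hypothetical placeholders; `provider` must be one of the values listed in the field's doc comment.

```rust
use databricks_rust_repos::models::CreateRepoRequest;

fn main() {
    // Required fields (url, provider) go through the generated constructor.
    let mut req = CreateRepoRequest::new(
        "https://github.com/example/repo.git".to_string(), // placeholder URL
        "gitHub".to_string(),
    );
    // Optional path must match /Repos/{folder}/{repo-name}; placeholder shown.
    req.path = Some("/Repos/team/example".to_string());
    // `path` is skipped when None, so minimal requests stay minimal.
    println!("{}", serde_json::to_string(&req).unwrap());
}
```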
-------------------------------------------------------------------------------- /databricks-rust-repos/src/models/get_repos_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Repos API 4 | * 5 | * The repos API allows users to manage their [repos](https://docs.databricks.com/repos.html). Users can use the API to access all repos that they have manage permissions on. 6 | * 7 | * The version of the OpenAPI document: 2.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | #[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)] 13 | pub struct GetReposResponse { 14 | #[serde(rename = "repos", skip_serializing_if = "Option::is_none")] 15 | pub repos: Option<Vec<crate::models::GetRepoResponse>>, 16 | /// Token that can be specified as a query parameter to the GET /repos endpoint to retrieve the next page of results. 17 | #[serde(rename = "next_page_token", skip_serializing_if = "Option::is_none")] 18 | pub next_page_token: Option<String>, 19 | } 20 | 21 | impl GetReposResponse { 22 | pub fn new() -> GetReposResponse { 23 | GetReposResponse { 24 | repos: None, 25 | next_page_token: None, 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/models/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod branch; 2 | pub use self::branch::Branch; 3 | pub mod create_repo_request; 4 | pub use self::create_repo_request::CreateRepoRequest; 5 | pub mod error; 6 | pub use self::error::Error; 7 | pub mod get_repo_response; 8 | pub use self::get_repo_response::GetRepoResponse; 9 | pub mod get_repos_response; 10 | pub use self::get_repos_response::GetReposResponse; 11 | pub mod tag; 12 | pub use self::tag::Tag; 13 | pub mod update_repo_request; 14 | pub use self::update_repo_request::UpdateRepoRequest; 15 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/models/tag.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Repos API 4 | * 5 | * The repos API allows users to manage their [repos](https://docs.databricks.com/repos.html). Users can use the API to access all repos that they have manage permissions on. 6 | * 7 | * The version of the OpenAPI document: 2.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | #[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)] 13 | pub struct Tag { 14 | /// Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state. Before committing new changes, you must update the repo to a branch instead of the detached HEAD. 15 | #[serde(rename = "tag")] 16 | pub tag: String, 17 | } 18 | 19 | impl Tag { 20 | pub fn new(tag: String) -> Tag { 21 | Tag { tag } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /databricks-rust-repos/src/models/update_repo_request.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Repos API 4 | * 5 | * The repos API allows users to manage their [repos](https://docs.databricks.com/repos.html). Users can use the API to access all repos that they have manage permissions on. 6 | * 7 | * The version of the OpenAPI document: 2.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | #[derive(JsonSchema, Clone, Debug, PartialEq, Default, Serialize, Deserialize)] 13 | pub struct UpdateRepoRequest { 14 | /// Branch that the local version of the repo is checked out to. 15 | #[serde(rename = "branch")] 16 | pub branch: String, 17 | /// Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo in a detached HEAD state.
Before committing new changes, you must update the repo to a branch instead of the detached HEAD. 18 | #[serde(rename = "tag")] 19 | pub tag: String, 20 | } 21 | 22 | impl UpdateRepoRequest { 23 | pub fn new(branch: String, tag: String) -> UpdateRepoRequest { 24 | UpdateRepoRequest { branch, tag } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /databricks-rust-secrets/.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /databricks-rust-secrets/.openapi-generator-ignore: -------------------------------------------------------------------------------- 1 | # OpenAPI Generator Ignore 2 | # Generated by openapi-generator https://github.com/openapitools/openapi-generator 3 | 4 | # Use this file to prevent files from being overwritten by the generator. 5 | # The patterns follow closely to .gitignore or .dockerignore. 6 | 7 | # As an example, the C# client generator defines ApiClient.cs. 8 | # You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: 9 | #ApiClient.cs 10 | 11 | # You can match any string of characters against a directory, file or extension with a single asterisk (*): 12 | #foo/*/qux 13 | # The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux 14 | 15 | # You can recursively match patterns against a directory, file or extension with a double asterisk (**): 16 | #foo/**/qux 17 | # This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux 18 | 19 | # You can also negate patterns with an exclamation (!). 20 | # For example, you can ignore all files in a docs folder with the file extension .md: 21 | #docs/*.md 22 | # Then explicitly reverse the ignore rule for a single file: 23 | #!docs/README.md 24 | -------------------------------------------------------------------------------- /databricks-rust-secrets/.openapi-generator/VERSION: -------------------------------------------------------------------------------- 1 | 7.7.0 2 | -------------------------------------------------------------------------------- /databricks-rust-secrets/.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | -------------------------------------------------------------------------------- /databricks-rust-secrets/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "databricks_rust_secrets" 3 | version = "2.0.0" 4 | authors = ["OpenAPI Generator team and contributors"] 5 | description = "No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)" 6 | # Override this license by providing a License Object in the OpenAPI. 
7 | license = "Unlicense" 8 | edition = "2021" 9 | 10 | [dependencies] 11 | schemars = "0.8.11" 12 | serde = { version = "^1.0", features = ["derive"] } 13 | serde_json = "^1.0" 14 | url = "^2.5" 15 | uuid = { version = "^1.8", features = ["serde", "v4"] } 16 | reqwest = { version = "^0.12", features = ["json", "multipart"] } 17 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceAclItem.md: -------------------------------------------------------------------------------- 1 | # WorkspaceAclItem 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **permission** | [**models::WorkspaceAclPermission**](WorkspaceAclPermission.md) | The permission level applied to the principal. | 8 | **principal** | **String** | The principal in which the permission is applied. | 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceAclPermission.md: -------------------------------------------------------------------------------- 1 | # WorkspaceAclPermission 2 | 3 | ## Enum Variants 4 | 5 | | Name | Value | 6 | |---- | -----| 7 | | Read | READ | 8 | | Write | WRITE | 9 | | Manage | MANAGE | 10 | 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceAzureKeyVaultSecretScopeMetadata.md: -------------------------------------------------------------------------------- 1 | # WorkspaceAzureKeyVaultSecretScopeMetadata 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **dns_name** | **String** | The DNS of the KeyVault | 8 | **resource_id** | **String** | The resource id of the azure KeyVault that user wants to associate the scope with. | 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceCreateScope.md: -------------------------------------------------------------------------------- 1 | # WorkspaceCreateScope 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **backend_azure_keyvault** | Option<[**models::WorkspaceAzureKeyVaultSecretScopeMetadata**](WorkspaceAzureKeyVaultSecretScopeMetadata.md)> | The metadata for the secret scope if the type is `AZURE_KEYVAULT` | [optional] 8 | **initial_manage_principal** | Option<**String**> | The principal that is initially granted `MANAGE` permission to the created scope. | [optional] 9 | **scope** | **String** | Scope name requested by the user. Scope names are unique. | 10 | **scope_backend_type** | Option<[**models::WorkspaceScopeBackendType**](WorkspaceScopeBackendType.md)> | The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS` | [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceDeleteAcl.md: -------------------------------------------------------------------------------- 1 | # WorkspaceDeleteAcl 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **principal** | **String** | The principal to remove an existing ACL from. | 8 | **scope** | **String** | The name of the scope to remove permissions from. | 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceDeleteScope.md: -------------------------------------------------------------------------------- 1 | # WorkspaceDeleteScope 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **scope** | **String** | Name of the scope to delete. | 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceDeleteSecret.md: -------------------------------------------------------------------------------- 1 | # WorkspaceDeleteSecret 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **key** | **String** | Name of the secret to delete. | 8 | **scope** | **String** | The name of the scope that contains the secret to delete. | 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceGetSecretResponse.md: -------------------------------------------------------------------------------- 1 | # WorkspaceGetSecretResponse 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **key** | Option<**String**> | A unique name to identify the secret. | [optional] 8 | **value** | Option<**String**> | The value of the secret in its byte representation. 
| [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceListAclsResponse.md: -------------------------------------------------------------------------------- 1 | # WorkspaceListAclsResponse 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **items** | Option<[**Vec**](WorkspaceAclItem.md)> | The associated ACLs rule applied to principals in the given scope. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceListScopesResponse.md: -------------------------------------------------------------------------------- 1 | # WorkspaceListScopesResponse 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **scopes** | Option<[**Vec**](WorkspaceSecretScope.md)> | The available secret scopes. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceListSecretsResponse.md: -------------------------------------------------------------------------------- 1 | # WorkspaceListSecretsResponse 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **secrets** | Option<[**Vec**](WorkspaceSecretMetadata.md)> | Metadata information of all secrets contained within the given scope. | [optional] 8 | 9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 10 | 11 | 12 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspacePutAcl.md: -------------------------------------------------------------------------------- 1 | # WorkspacePutAcl 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **permission** | [**models::WorkspaceAclPermission**](WorkspaceAclPermission.md) | The permission level applied to the principal. | 8 | **principal** | **String** | The principal in which the permission is applied. | 9 | **scope** | **String** | The name of the scope to apply permissions to. 
| 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspacePutSecret.md: -------------------------------------------------------------------------------- 1 | # WorkspacePutSecret 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **bytes_value** | Option<**String**> | If specified, value will be stored as bytes. | [optional] 8 | **key** | **String** | A unique name to identify the secret. | 9 | **scope** | **String** | The name of the scope to which the secret will be associated with. | 10 | **string_value** | Option<**String**> | If specified, note that the value will be stored in UTF-8 (MB4) form. | [optional] 11 | 12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 13 | 14 | 15 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceScopeBackendType.md: -------------------------------------------------------------------------------- 1 | # WorkspaceScopeBackendType 2 | 3 | ## Enum Variants 4 | 5 | | Name | Value | 6 | |---- | -----| 7 | | Databricks | DATABRICKS | 8 | | AzureKeyvault | AZURE_KEYVAULT | 9 | 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceSecretMetadata.md: -------------------------------------------------------------------------------- 1 | # WorkspaceSecretMetadata 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **key** | Option<**String**> | A unique name to identify the secret. | [optional] 8 | **last_updated_timestamp** | Option<**i64**> | The last updated timestamp (in milliseconds) for the secret. | [optional] 9 | 10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 11 | 12 | 13 | -------------------------------------------------------------------------------- /databricks-rust-secrets/docs/WorkspaceSecretScope.md: -------------------------------------------------------------------------------- 1 | # WorkspaceSecretScope 2 | 3 | ## Properties 4 | 5 | Name | Type | Description | Notes 6 | ------------ | ------------- | ------------- | ------------- 7 | **backend_type** | Option<[**models::WorkspaceScopeBackendType**](WorkspaceScopeBackendType.md)> | The type of secret scope backend. | [optional] 8 | **keyvault_metadata** | Option<[**models::WorkspaceAzureKeyVaultSecretScopeMetadata**](WorkspaceAzureKeyVaultSecretScopeMetadata.md)> | The metadata for the secret scope if the type is `AZURE_KEYVAULT` | [optional] 9 | **name** | Option<**String**> | A unique name to identify the secret scope. 
| [optional] 10 | 11 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) 12 | 13 | 14 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_imports)] 2 | #![allow(clippy::too_many_arguments)] 3 | 4 | extern crate serde; 5 | extern crate serde_json; 6 | extern crate url; 7 | extern crate reqwest; 8 | 9 | pub mod apis; 10 | pub mod models; 11 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_acl_item.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceAclItem { 17 | /// The permission level applied to the principal. 18 | #[serde(rename = "permission")] 19 | pub permission: models::WorkspaceAclPermission, 20 | /// The principal in which the permission is applied. 21 | #[serde(rename = "principal")] 22 | pub principal: String, 23 | } 24 | 25 | impl WorkspaceAclItem { 26 | pub fn new(permission: models::WorkspaceAclPermission, principal: String) -> WorkspaceAclItem { 27 | WorkspaceAclItem { 28 | permission, 29 | principal, 30 | } 31 | } 32 | } 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_acl_permission.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | /// 16 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 17 | pub enum WorkspaceAclPermission { 18 | #[serde(rename = "READ")] 19 | Read, 20 | #[serde(rename = "WRITE")] 21 | Write, 22 | #[serde(rename = "MANAGE")] 23 | Manage, 24 | 25 | } 26 | 27 | impl std::fmt::Display for WorkspaceAclPermission { 28 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 29 | match self { 30 | Self::Read => write!(f, "READ"), 31 | Self::Write => write!(f, "WRITE"), 32 | Self::Manage => write!(f, "MANAGE"), 33 | } 34 | } 35 | } 36 | 37 | impl Default for WorkspaceAclPermission { 38 | fn default() -> WorkspaceAclPermission { 39 | Self::Read 40 | } 41 | } 42 | 43 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_azure_key_vault_secret_scope_metadata.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and 
Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceAzureKeyVaultSecretScopeMetadata { 17 | /// The DNS of the KeyVault 18 | #[serde(rename = "dns_name")] 19 | pub dns_name: String, 20 | /// The resource id of the azure KeyVault that user wants to associate the scope with. 21 | #[serde(rename = "resource_id")] 22 | pub resource_id: String, 23 | } 24 | 25 | impl WorkspaceAzureKeyVaultSecretScopeMetadata { 26 | pub fn new(dns_name: String, resource_id: String) -> WorkspaceAzureKeyVaultSecretScopeMetadata { 27 | WorkspaceAzureKeyVaultSecretScopeMetadata { 28 | dns_name, 29 | resource_id, 30 | } 31 | } 32 | } 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_delete_acl.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceDeleteAcl { 17 | /// The principal to remove an existing ACL from. 18 | #[serde(rename = "principal")] 19 | pub principal: String, 20 | /// The name of the scope to remove permissions from. 21 | #[serde(rename = "scope")] 22 | pub scope: String, 23 | } 24 | 25 | impl WorkspaceDeleteAcl { 26 | pub fn new(principal: String, scope: String) -> WorkspaceDeleteAcl { 27 | WorkspaceDeleteAcl { 28 | principal, 29 | scope, 30 | } 31 | } 32 | } 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_delete_scope.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceDeleteScope { 17 | /// Name of the scope to delete. 
-------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_delete_scope.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceDeleteScope { 17 | /// Name of the scope to delete. 18 | #[serde(rename = "scope")] 19 | pub scope: String, 20 | } 21 | 22 | impl WorkspaceDeleteScope { 23 | pub fn new(scope: String) -> WorkspaceDeleteScope { 24 | WorkspaceDeleteScope { 25 | scope, 26 | } 27 | } 28 | } 29 | 30 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_delete_secret.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceDeleteSecret { 17 | /// Name of the secret to delete. 18 | #[serde(rename = "key")] 19 | pub key: String, 20 | /// The name of the scope that contains the secret to delete. 21 | #[serde(rename = "scope")] 22 | pub scope: String, 23 | } 24 | 25 | impl WorkspaceDeleteSecret { 26 | pub fn new(key: String, scope: String) -> WorkspaceDeleteSecret { 27 | WorkspaceDeleteSecret { 28 | key, 29 | scope, 30 | } 31 | } 32 | } 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_get_secret_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceGetSecretResponse { 17 | /// A unique name to identify the secret. 18 | #[serde(rename = "key", skip_serializing_if = "Option::is_none")] 19 | pub key: Option<String>, 20 | /// The value of the secret in its byte representation. 21 | #[serde(rename = "value", skip_serializing_if = "Option::is_none")] 22 | pub value: Option<String>, 23 | } 24 | 25 | impl WorkspaceGetSecretResponse { 26 | pub fn new() -> WorkspaceGetSecretResponse { 27 | WorkspaceGetSecretResponse { 28 | key: None, 29 | value: None, 30 | } 31 | } 32 | } 33 | 34 |
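The `value` field above is the secret's byte representation, which the REST API returns base64-encoded. A decoding sketch, assuming the `base64` crate (0.21-style `Engine` API) is added as a dependency:

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};
use databricks_rust_secrets::models::WorkspaceGetSecretResponse;

// Decode the base64-encoded `value` back into raw secret bytes.
fn decode_secret(resp: &WorkspaceGetSecretResponse) -> Option<Vec<u8>> {
    resp.value.as_ref().and_then(|v| STANDARD.decode(v).ok())
}
```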
-------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_list_acls_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceListAclsResponse { 17 | /// The associated ACL rules applied to principals in the given scope. 18 | #[serde(rename = "items", skip_serializing_if = "Option::is_none")] 19 | pub items: Option<Vec<models::WorkspaceAclItem>>, 20 | } 21 | 22 | impl WorkspaceListAclsResponse { 23 | pub fn new() -> WorkspaceListAclsResponse { 24 | WorkspaceListAclsResponse { 25 | items: None, 26 | } 27 | } 28 | } 29 | 30 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_list_scopes_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceListScopesResponse { 17 | /// The available secret scopes. 18 | #[serde(rename = "scopes", skip_serializing_if = "Option::is_none")] 19 | pub scopes: Option<Vec<models::WorkspaceSecretScope>>, 20 | } 21 | 22 | impl WorkspaceListScopesResponse { 23 | pub fn new() -> WorkspaceListScopesResponse { 24 | WorkspaceListScopesResponse { 25 | scopes: None, 26 | } 27 | } 28 | } 29 | 30 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_list_secrets_response.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceListSecretsResponse { 17 | /// Metadata information of all secrets contained within the given scope. 18 | #[serde(rename = "secrets", skip_serializing_if = "Option::is_none")] 19 | pub secrets: Option<Vec<models::WorkspaceSecretMetadata>>, 20 | } 21 | 22 | impl WorkspaceListSecretsResponse { 23 | pub fn new() -> WorkspaceListSecretsResponse { 24 | WorkspaceListSecretsResponse { 25 | secrets: None, 26 | } 27 | } 28 | } 29 | 30 |
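A deserialization sketch for these list responses; the JSON payload below is hand-written in the shape the endpoint returns, not captured from a real workspace:

```rust
use databricks_rust_secrets::models::WorkspaceListSecretsResponse;

fn main() {
    // Hand-written example payload.
    let json = r#"{"secrets":[{"key":"foo","last_updated_timestamp":1664470380000}]}"#;
    let resp: WorkspaceListSecretsResponse = serde_json::from_str(json).unwrap();
    for secret in resp.secrets.unwrap_or_default() {
        println!("{:?} (updated {:?})", secret.key, secret.last_updated_timestamp);
    }
}
```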
-------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_put_acl.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspacePutAcl { 17 | /// The permission level applied to the principal. 18 | #[serde(rename = "permission")] 19 | pub permission: models::WorkspaceAclPermission, 20 | /// The principal to which the permission is applied. 21 | #[serde(rename = "principal")] 22 | pub principal: String, 23 | /// The name of the scope to apply permissions to. 24 | #[serde(rename = "scope")] 25 | pub scope: String, 26 | } 27 | 28 | impl WorkspacePutAcl { 29 | pub fn new(permission: models::WorkspaceAclPermission, principal: String, scope: String) -> WorkspacePutAcl { 30 | WorkspacePutAcl { 31 | permission, 32 | principal, 33 | scope, 34 | } 35 | } 36 | } 37 | 38 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_scope_backend_type.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | /// 16 | #[derive(JsonSchema, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] 17 | pub enum WorkspaceScopeBackendType { 18 | #[serde(rename = "DATABRICKS")] 19 | Databricks, 20 | #[serde(rename = "AZURE_KEYVAULT")] 21 | AzureKeyvault, 22 | 23 | } 24 | 25 | impl std::fmt::Display for WorkspaceScopeBackendType { 26 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 27 | match self { 28 | Self::Databricks => write!(f, "DATABRICKS"), 29 | Self::AzureKeyvault => write!(f, "AZURE_KEYVAULT"), 30 | } 31 | } 32 | } 33 | 34 | impl Default for WorkspaceScopeBackendType { 35 | fn default() -> WorkspaceScopeBackendType { 36 | Self::Databricks 37 | } 38 | } 39 |
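A quick demonstration that `Display` and serde agree on the backend type's wire tokens:

```rust
use databricks_rust_secrets::models::WorkspaceScopeBackendType;

fn main() {
    let backend = WorkspaceScopeBackendType::AzureKeyvault;
    // Both representations use the renamed token, not the Rust variant name.
    assert_eq!(backend.to_string(), "AZURE_KEYVAULT");
    assert_eq!(serde_json::to_string(&backend).unwrap(), r#""AZURE_KEYVAULT""#);
}
```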
-------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_secret_metadata.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceSecretMetadata { 17 | /// A unique name to identify the secret. 18 | #[serde(rename = "key", skip_serializing_if = "Option::is_none")] 19 | pub key: Option<String>, 20 | /// The last updated timestamp (in milliseconds) for the secret. 21 | #[serde(rename = "last_updated_timestamp", skip_serializing_if = "Option::is_none")] 22 | pub last_updated_timestamp: Option<i64>, 23 | } 24 | 25 | impl WorkspaceSecretMetadata { 26 | pub fn new() -> WorkspaceSecretMetadata { 27 | WorkspaceSecretMetadata { 28 | key: None, 29 | last_updated_timestamp: None, 30 | } 31 | } 32 | } 33 | 34 | -------------------------------------------------------------------------------- /databricks-rust-secrets/src/models/workspace_secret_scope.rs: -------------------------------------------------------------------------------- 1 | use schemars::JsonSchema; 2 | /* 3 | * Databricks Accounts and Workspace REST API on ALL 4 | * 5 | * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) 6 | * 7 | * The version of the OpenAPI document: 1.0.0 8 | * 9 | * Generated by: https://openapi-generator.tech 10 | */ 11 | 12 | use crate::models; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | #[derive(JsonSchema, Clone, Default, Debug, PartialEq, Serialize, Deserialize)] 16 | pub struct WorkspaceSecretScope { 17 | /// The type of secret scope backend. 18 | #[serde(rename = "backend_type", skip_serializing_if = "Option::is_none")] 19 | pub backend_type: Option<models::WorkspaceScopeBackendType>, 20 | /// The metadata for the secret scope if the type is `AZURE_KEYVAULT` 21 | #[serde(rename = "keyvault_metadata", skip_serializing_if = "Option::is_none")] 22 | pub keyvault_metadata: Option<Box<models::WorkspaceAzureKeyVaultSecretScopeMetadata>>, 23 | /// A unique name to identify the secret scope. 24 | #[serde(rename = "name", skip_serializing_if = "Option::is_none")] 25 | pub name: Option<String>, 26 | } 27 | 28 | impl WorkspaceSecretScope { 29 | pub fn new() -> WorkspaceSecretScope { 30 | WorkspaceSecretScope { 31 | backend_type: None, 32 | keyvault_metadata: None, 33 | name: None, 34 | } 35 | } 36 | } 37 | 38 | -------------------------------------------------------------------------------- /examples/databricks-secret.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: com.dstancu.databricks/v1 3 | kind: DatabricksSecretScope 4 | metadata: 5 | name: my-secret-scope 6 | namespace: default 7 | spec: 8 | scope: 9 | name: my-super-cool-scope 10 | --- 11 | apiVersion: v1 12 | data: 13 | hello: d3VybA== 14 | foo: YmFy 15 | kind: Secret 16 | metadata: 17 | name: my-super-cool-secret 18 | type: Opaque 19 | --- 20 | apiVersion: com.dstancu.databricks/v1 21 | kind: DatabricksSecret 22 | metadata: 23 | name: my-super-cool-databricks-secret 24 | namespace: default 25 | spec: 26 | scope: my-super-cool-scope 27 | secret_name: my-super-cool-secret -------------------------------------------------------------------------------- /examples/git-credential.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Secret 4 | metadata: 5 | name: my-secret-name 6 | namespace: default 7 | type: Opaque 8 | data: 9 | personal_access_token: YXl5eXkgbG1hbw== 10 | --- 11 | apiVersion: com.dstancu.databricks/v1 12 | kind: GitCredential 13 | metadata: 14 | name: example-credential 15 | namespace: default 16 | spec: 17 | secret_name: my-secret-name 18 | credential: 19 | git_username: mach-kernel 20 | git_provider: gitHub -------------------------------------------------------------------------------- /examples/git-repo.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: com.dstancu.databricks/v1 2 | kind: Repo 3 | metadata: 4 | annotations: 5 | databricks-operator/owner:
operator 6 | name: databricks-kube-operator 7 | spec: 8 | repository: 9 | path: /Repos/Test/databricks-kube-operator 10 | provider: gitHub 11 | url: https://github.com/mach-kernel/databricks-kube-operator.git 12 | -------------------------------------------------------------------------------- /examples/job.py: -------------------------------------------------------------------------------- 1 | # Databricks notebook source 2 | 3 | # COMMAND ---------- 4 | import pyspark 5 | from pyspark.sql import SparkSession 6 | from pyspark.sql.types import StringType 7 | import pyspark.sql.functions as F 8 | from functools import reduce 9 | import requests 10 | 11 | spark = SparkSession \ 12 | .builder \ 13 | .master("local[*]") \ 14 | .appName('why-is-the-example-always-wordcount') \ 15 | .getOrCreate() 16 | 17 | sc = spark.sparkContext 18 | 19 | moby_dick = requests.get('https://www.gutenberg.org/files/2701/old/moby10b.txt').text 20 | 21 | words = reduce( 22 | lambda words, line: words + [w.lower() for w in line.split(' ')], 23 | moby_dick.splitlines(), 24 | [] 25 | ) 26 | 27 | df = spark.createDataFrame(words, StringType()) 28 | 29 | df \ 30 | .withColumnRenamed(df.columns[0], "word") \ 31 | .groupBy("word") \ 32 | .agg(F.count("word").alias("count")) \ 33 | .orderBy(F.desc("count")) \ 34 | .show() 35 | -------------------------------------------------------------------------------- /openapi/config-git.yaml: -------------------------------------------------------------------------------- 1 | packageName: databricks_rust_git_credentials 2 | packageVersion: 2.1.0 -------------------------------------------------------------------------------- /openapi/config-jobs.yaml: -------------------------------------------------------------------------------- 1 | packageName: databricks_rust_jobs 2 | packageVersion: 2.1.0 -------------------------------------------------------------------------------- /openapi/config-repos.yaml: -------------------------------------------------------------------------------- 1 | packageName: databricks_rust_repos 2 | packageVersion: 2.0.0 -------------------------------------------------------------------------------- /openapi/config-secrets.yaml: -------------------------------------------------------------------------------- 1 | packageName: databricks_rust_secrets 2 | packageVersion: 2.0.0 --------------------------------------------------------------------------------
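Tying the secret-scope models back to the `DatabricksSecretScope` example above, a minimal sketch of the equivalent API payload; the scope name mirrors examples/databricks-secret.yaml:

```rust
use databricks_rust_secrets::models::{WorkspaceScopeBackendType, WorkspaceSecretScope};

fn main() {
    // A Databricks-backed scope, so no KeyVault metadata is attached.
    let scope = WorkspaceSecretScope {
        name: Some("my-super-cool-scope".to_string()),
        backend_type: Some(WorkspaceScopeBackendType::Databricks),
        keyvault_metadata: None,
    };
    println!("{}", serde_json::to_string_pretty(&scope).unwrap());
}
```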