├── components ├── dbt │ ├── example │ │ ├── silver │ │ │ ├── analyses │ │ │ │ └── README.md │ │ │ ├── macros │ │ │ │ └── README.md │ │ │ ├── seeds │ │ │ │ └── README.md │ │ │ ├── tests │ │ │ │ └── README.md │ │ │ ├── snapshots │ │ │ │ └── README.md │ │ │ ├── dbt_project.yml │ │ │ └── models │ │ │ │ ├── hive_to_iceberg.sql │ │ │ │ └── example.sql │ │ └── profiles.yml │ └── docker │ │ ├── build-docker.sh │ │ └── Dockerfile ├── livy │ ├── chart │ │ ├── templates │ │ │ ├── _helpers.tpl │ │ │ ├── pvc.yaml │ │ │ ├── service.yaml │ │ │ ├── configmap.yaml │ │ │ └── rbac.yaml │ │ ├── Chart.yaml │ │ └── values.yaml │ └── docker │ │ ├── run-livy.sh │ │ ├── config │ │ ├── build-docker.sh │ │ └── Dockerfile ├── metabase │ ├── chart │ │ ├── templates │ │ │ ├── _helpers.tpl │ │ │ ├── secret.yaml │ │ │ ├── service.yaml │ │ │ ├── ingress.yaml │ │ │ └── rbac.yaml │ │ ├── Chart.yaml │ │ └── values.yaml │ └── docker │ │ ├── run-metabase.sh │ │ └── build-docker.sh ├── hive │ ├── spark-thrift-server │ │ ├── src │ │ │ ├── test │ │ │ │ ├── java │ │ │ │ │ └── README.md │ │ │ │ └── resources │ │ │ │ │ ├── README.md │ │ │ │ │ └── data │ │ │ │ │ └── test.json │ │ │ └── main │ │ │ │ ├── resources │ │ │ │ └── README.md │ │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── cloudcheflabs │ │ │ │ └── dataroaster │ │ │ │ └── hive │ │ │ │ └── SparkThriftServerRunner.java │ │ └── bin │ │ │ └── spark-thrift-server-service.yaml │ └── hive-metastore │ │ ├── chart │ │ ├── Chart.yaml │ │ └── values.yaml │ │ └── cr │ │ ├── hive-metastore-mysql.yaml │ │ └── hive-metastore.yaml ├── mysql │ └── chart │ │ ├── values.yaml │ │ └── Chart.yaml ├── redash │ ├── chart │ │ ├── Chart.yaml │ │ ├── values.yaml │ │ └── templates │ │ │ └── ingress.yaml │ └── cr │ │ └── helm-redash.yaml ├── airflow │ └── docker │ │ └── Dockerfile ├── nfs │ └── cr │ │ └── nfs.yaml ├── kafka │ └── cr │ │ └── helm-kafka.yaml └── jupyterhub │ └── cr │ └── helm-jupyterhub.yaml ├── operators ├── helm │ ├── helm-operator │ │ ├── src │ │ │ ├── test │ │ │ 
│ ├── java │ │ │ │ │ └── README.md │ │ │ │ └── resources │ │ │ │ │ └── cr │ │ │ │ │ ├── helm-nginx.yaml │ │ │ │ │ ├── helm-mysql.yaml │ │ │ │ │ └── helm-nfs.yaml │ │ │ └── main │ │ │ │ ├── java │ │ │ │ └── com │ │ │ │ │ └── cloudcheflabs │ │ │ │ │ └── dataroaster │ │ │ │ │ └── operators │ │ │ │ │ └── helm │ │ │ │ │ ├── handler │ │ │ │ │ ├── ActionHandler.java │ │ │ │ │ ├── HelmChartActionEvent.java │ │ │ │ │ └── HelmChartClient.java │ │ │ │ │ ├── crd │ │ │ │ │ ├── HelmChartList.java │ │ │ │ │ └── HelmChart.java │ │ │ │ │ ├── config │ │ │ │ │ ├── HelmConfig.java │ │ │ │ │ └── SpringContextSingleton.java │ │ │ │ │ └── util │ │ │ │ │ ├── YamlUtils.java │ │ │ │ │ └── FileUtils.java │ │ │ │ └── resources │ │ │ │ ├── templates │ │ │ │ └── kubeconfig │ │ │ │ │ └── config │ │ │ │ └── logback.xml │ │ └── docker │ │ │ ├── run-helm-operator.sh │ │ │ └── Dockerfile │ └── chart │ │ ├── Chart.yaml │ │ ├── values.yaml │ │ └── templates │ │ ├── helm-charts.yaml │ │ ├── rbac.yaml │ │ └── deployment.yaml ├── trino │ ├── trino-operator │ │ ├── src │ │ │ ├── main │ │ │ │ ├── resources │ │ │ │ │ ├── application.properties │ │ │ │ │ ├── application-prod.yml │ │ │ │ │ ├── application-dev.yml │ │ │ │ │ └── logback.xml │ │ │ │ └── java │ │ │ │ │ └── com │ │ │ │ │ └── cloudcheflabs │ │ │ │ │ └── dataroaster │ │ │ │ │ └── operators │ │ │ │ │ └── trino │ │ │ │ │ ├── handler │ │ │ │ │ ├── ActionHandler.java │ │ │ │ │ ├── TrinoClusterActionEvent.java │ │ │ │ │ └── TrinoClusterClient.java │ │ │ │ │ ├── crd │ │ │ │ │ ├── TrinoClusterList.java │ │ │ │ │ ├── TrinoCluster.java │ │ │ │ │ ├── Config.java │ │ │ │ │ ├── Autoscaler.java │ │ │ │ │ └── Resources.java │ │ │ │ │ ├── api │ │ │ │ │ └── dao │ │ │ │ │ │ ├── ResourceDao.java │ │ │ │ │ │ └── K8sResourceDao.java │ │ │ │ │ ├── domain │ │ │ │ │ ├── Roles.java │ │ │ │ │ ├── Privileges.java │ │ │ │ │ ├── BasicAuthentication.java │ │ │ │ │ └── CustomResource.java │ │ │ │ │ ├── dao │ │ │ │ │ ├── AbstractKubernetesResourceDao.java │ │ │ │ │ └── 
KubernetesResourceDao.java │ │ │ │ │ └── util │ │ │ │ │ ├── Base64Utils.java │ │ │ │ │ ├── KubernetesUtils.java │ │ │ │ │ └── YamlUtils.java │ │ │ └── test │ │ │ │ ├── resources │ │ │ │ └── k8s-manifests │ │ │ │ │ └── deploy-nginx.yaml │ │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── cloudcheflabs │ │ │ │ └── dataroaster │ │ │ │ └── operators │ │ │ │ └── trino │ │ │ │ └── component │ │ │ │ └── SimpleHttpClient.java │ │ └── docker │ │ │ ├── run-trino-operator.sh │ │ │ └── Dockerfile │ └── chart │ │ ├── Chart.yaml │ │ ├── templates │ │ ├── service.yaml │ │ ├── cm.yaml │ │ └── rbac.yaml │ │ └── values.yaml ├── dataroaster │ ├── dataroaster-operator │ │ ├── src │ │ │ ├── main │ │ │ │ ├── resources │ │ │ │ │ ├── application.properties │ │ │ │ │ ├── templates │ │ │ │ │ │ ├── spark-thrift-server │ │ │ │ │ │ │ ├── s3-secret.yaml │ │ │ │ │ │ │ ├── spark-thrift-server-pvc.yaml │ │ │ │ │ │ │ ├── spark-thrift-server-service.yaml │ │ │ │ │ │ │ ├── cluster-role.yaml │ │ │ │ │ │ │ └── cluster-rolebinding.yaml │ │ │ │ │ │ ├── hive-metastore │ │ │ │ │ │ │ └── hive-metastore-mysql.yaml │ │ │ │ │ │ └── nfs │ │ │ │ │ │ │ └── nfs.yaml │ │ │ │ │ ├── application-prod.yml │ │ │ │ │ └── application-dev.yml │ │ │ │ └── java │ │ │ │ │ └── com │ │ │ │ │ └── cloudcheflabs │ │ │ │ │ └── dataroaster │ │ │ │ │ └── operators │ │ │ │ │ └── dataroaster │ │ │ │ │ ├── api │ │ │ │ │ ├── dao │ │ │ │ │ │ ├── common │ │ │ │ │ │ │ ├── GenericDao.java │ │ │ │ │ │ │ └── Operations.java │ │ │ │ │ │ ├── UsersDao.java │ │ │ │ │ │ ├── UserTokenDao.java │ │ │ │ │ │ ├── ComponentsDao.java │ │ │ │ │ │ ├── CustomResourceDao.java │ │ │ │ │ │ └── K8sResourceDao.java │ │ │ │ │ └── service │ │ │ │ │ │ ├── UsersService.java │ │ │ │ │ │ ├── UserTokenService.java │ │ │ │ │ │ ├── ComponentsService.java │ │ │ │ │ │ ├── CustomResourceService.java │ │ │ │ │ │ └── K8sResourceService.java │ │ │ │ │ ├── domain │ │ │ │ │ ├── Roles.java │ │ │ │ │ ├── Privileges.java │ │ │ │ │ ├── BasicAuthentication.java │ │ │ │ │ └── model │ │ │ │ │ │ 
├── Components.java │ │ │ │ │ │ └── UserToken.java │ │ │ │ │ ├── dao │ │ │ │ │ ├── common │ │ │ │ │ │ ├── AbstractKubernetesDao.java │ │ │ │ │ │ ├── AbstractDao.java │ │ │ │ │ │ └── GenericHibernateDao.java │ │ │ │ │ └── hibernate │ │ │ │ │ │ ├── HibernateUsersDao.java │ │ │ │ │ │ ├── HibernateUserTokenDao.java │ │ │ │ │ │ └── HibernateComponentsDao.java │ │ │ │ │ ├── util │ │ │ │ │ ├── Base64Utils.java │ │ │ │ │ ├── RandomUtils.java │ │ │ │ │ ├── BCryptUtils.java │ │ │ │ │ ├── TokenUtils.java │ │ │ │ │ ├── YamlUtils.java │ │ │ │ │ └── TempFileUtils.java │ │ │ │ │ ├── kubernetes │ │ │ │ │ └── client │ │ │ │ │ │ └── KubernetesClientUtils.java │ │ │ │ │ ├── component │ │ │ │ │ └── SpringContextSingleton.java │ │ │ │ │ ├── config │ │ │ │ │ └── FilterConfigurer.java │ │ │ │ │ └── service │ │ │ │ │ └── common │ │ │ │ │ └── AbstractService.java │ │ │ └── test │ │ │ │ ├── resources │ │ │ │ ├── cr │ │ │ │ │ ├── spark-operator.yaml │ │ │ │ │ └── trino-operator.yaml │ │ │ │ └── application-test.properties │ │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── cloudcheflabs │ │ │ │ └── dataroaster │ │ │ │ └── operators │ │ │ │ └── dataroaster │ │ │ │ ├── component │ │ │ │ └── DBSchemaCreatorTestRunner.java │ │ │ │ ├── util │ │ │ │ ├── RandomUtilsTestRunner.java │ │ │ │ ├── IdUtilsTestRunner.java │ │ │ │ └── TokenUtilsTestRunner.java │ │ │ │ └── test │ │ │ │ └── SpringBootTestRunnerBase.java │ │ ├── docs │ │ │ └── images │ │ │ │ └── dataroaster-architecture.jpg │ │ └── docker │ │ │ ├── run-dataroaster-operator.sh │ │ │ └── create-db-schema.sh │ └── chart │ │ ├── templates │ │ ├── _helpers.tpl │ │ ├── namespace.yaml │ │ ├── service.yaml │ │ ├── configmap.yaml │ │ └── rbac.yaml │ │ ├── Chart.yaml │ │ └── values.yaml └── spark │ ├── chart │ ├── Chart.yaml │ ├── values.yaml │ └── templates │ │ └── rbac.yaml │ └── spark-operator │ ├── docker │ ├── run-spark-operator.sh │ └── kubernetes.repo │ └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── cloudcheflabs │ │ │ └── dataroaster │ │ │ └── 
operators │ │ │ └── spark │ │ │ ├── api │ │ │ └── dao │ │ │ │ └── ResourceDao.java │ │ │ ├── crd │ │ │ ├── SparkApplicationList.java │ │ │ ├── SparkApplication.java │ │ │ ├── Resources.java │ │ │ └── ValueFrom.java │ │ │ ├── handler │ │ │ ├── ActionHandler.java │ │ │ └── SparkApplicationActionEvent.java │ │ │ ├── dao │ │ │ └── kubernetes │ │ │ │ ├── AbstractKubernetesResourceDao.java │ │ │ │ └── KubernetesResourceDao.java │ │ │ ├── config │ │ │ ├── SpringContextSingleton.java │ │ │ └── APIConfig.java │ │ │ └── util │ │ │ ├── KubernetesUtils.java │ │ │ ├── HttpUtils.java │ │ │ └── SparkApplicationExecutor.java │ └── resources │ │ ├── logback.xml │ │ └── templates │ │ └── kubeconfig │ │ └── config │ └── test │ └── resources │ ├── pyspark │ └── pi.py │ └── cr │ └── pi-py.yaml ├── trino-ecosystem ├── trino-controller │ ├── chart │ │ ├── templates │ │ │ ├── _helpers.tpl │ │ │ ├── namespace.yaml │ │ │ ├── service.yaml │ │ │ ├── configmap.yaml │ │ │ └── rbac.yaml │ │ ├── Chart.yaml │ │ └── values.yaml │ ├── src │ │ ├── main │ │ │ ├── resources │ │ │ │ ├── application.properties │ │ │ │ ├── templates │ │ │ │ │ └── cr │ │ │ │ │ │ ├── cert-manager.yaml │ │ │ │ │ │ ├── prod-issuer.yaml │ │ │ │ │ │ ├── nginx-ingress-controller.yaml │ │ │ │ │ │ ├── grafana.yaml │ │ │ │ │ │ ├── prometheus.yaml │ │ │ │ │ │ └── trino-gateway.yaml │ │ │ │ ├── application-prod.yml │ │ │ │ ├── application-dev.yml │ │ │ │ └── logback.xml │ │ │ └── java │ │ │ │ └── com │ │ │ │ └── cloudcheflabs │ │ │ │ └── dataroaster │ │ │ │ └── trino │ │ │ │ └── controller │ │ │ │ ├── api │ │ │ │ ├── dao │ │ │ │ │ ├── ClusterJmxDao.java │ │ │ │ │ ├── ScaleWorkerDao.java │ │ │ │ │ ├── K8sResourceDao.java │ │ │ │ │ └── RegisterClusterDao.java │ │ │ │ └── service │ │ │ │ │ ├── ClusterJmxService.java │ │ │ │ │ ├── ScaleWorkerService.java │ │ │ │ │ ├── K8sResourceService.java │ │ │ │ │ └── RegisterClusterService.java │ │ │ │ ├── util │ │ │ │ ├── PauseUtils.java │ │ │ │ └── Base64Utils.java │ │ │ │ ├── domain │ │ │ │ ├── 
Roles.java │ │ │ │ ├── Privileges.java │ │ │ │ ├── BasicAuthentication.java │ │ │ │ ├── CustomResource.java │ │ │ │ └── RestResponse.java │ │ │ │ ├── dao │ │ │ │ └── common │ │ │ │ │ ├── AbstractKubernetesDao.java │ │ │ │ │ └── AbstractRestDao.java │ │ │ │ ├── config │ │ │ │ └── HttpClientConfigurer.java │ │ │ │ ├── service │ │ │ │ └── ClusterJmxServiceImpl.java │ │ │ │ └── component │ │ │ │ └── SimpleHttpClient.java │ │ └── test │ │ │ ├── resources │ │ │ ├── memory-properties │ │ │ │ ├── memory.properties │ │ │ │ └── config.properties │ │ │ └── trino-pod-template │ │ │ │ ├── worker-pod-template.yaml │ │ │ │ └── coordinator-pod-template.yaml │ │ │ └── java │ │ │ └── com │ │ │ └── cloudcheflabs │ │ │ └── dataroaster │ │ │ └── trino │ │ │ └── controller │ │ │ ├── util │ │ │ └── TemplateUtilsTestRunner.java │ │ │ └── component │ │ │ └── SpringContextSingleton.java │ ├── docs │ │ └── images │ │ │ └── trino-gateway-architecture-with-controller.jpg │ └── docker │ │ ├── run-trino-controller.sh │ │ └── Dockerfile └── trino-gateway │ ├── src │ ├── main │ │ ├── resources │ │ │ ├── application.properties │ │ │ ├── application-dev.yml │ │ │ └── application-prod.yml │ │ └── java │ │ │ └── com │ │ │ └── cloudcheflabs │ │ │ └── dataroaster │ │ │ └── trino │ │ │ └── gateway │ │ │ ├── api │ │ │ ├── dao │ │ │ │ ├── CacheDao.java │ │ │ │ ├── common │ │ │ │ │ ├── GenericDao.java │ │ │ │ │ └── Operations.java │ │ │ │ ├── UsersDao.java │ │ │ │ ├── ClusterDao.java │ │ │ │ └── ClusterGroupDao.java │ │ │ └── service │ │ │ │ ├── CacheService.java │ │ │ │ ├── UsersService.java │ │ │ │ ├── ClusterService.java │ │ │ │ ├── TrinoActiveQueryCountRestService.java │ │ │ │ └── ClusterGroupService.java │ │ │ ├── util │ │ │ ├── RandomUtils.java │ │ │ ├── Base64Utils.java │ │ │ ├── BCryptUtils.java │ │ │ └── TempFileUtils.java │ │ │ ├── domain │ │ │ ├── Roles.java │ │ │ ├── Privileges.java │ │ │ ├── BasicAuthentication.java │ │ │ ├── ClusterWithActiveQueryCount.java │ │ │ ├── TrinoActiveQueryCount.java 
│ │ │ ├── RestResponse.java │ │ │ ├── TrinoResponse.java │ │ │ └── model │ │ │ │ └── Users.java │ │ │ ├── dao │ │ │ ├── redis │ │ │ │ ├── RedisTrinoResponseCacheDao.java │ │ │ │ └── RedisTrinoActiveQueryCountCacheDao.java │ │ │ ├── common │ │ │ │ ├── AbstractDao.java │ │ │ │ └── GenericHibernateDao.java │ │ │ └── hibernate │ │ │ │ ├── HibernateUsersDao.java │ │ │ │ ├── HibernateClusterDao.java │ │ │ │ └── HibernateClusterGroupDao.java │ │ │ ├── config │ │ │ ├── HttpClientConfigurer.java │ │ │ └── FilterConfigurer.java │ │ │ ├── component │ │ │ ├── SimpleHttpClient.java │ │ │ └── SpringContextSingleton.java │ │ │ ├── cache │ │ │ └── TrinoResponseRedisCache.java │ │ │ └── service │ │ │ └── common │ │ │ └── AbstractService.java │ └── test │ │ ├── resources │ │ ├── gzip │ │ │ └── amendments.txt.gz │ │ └── application-test.properties │ │ └── java │ │ └── com │ │ └── cloudcheflabs │ │ └── dataroaster │ │ └── trino │ │ └── gateway │ │ ├── TrinoProxyTestRunner.java │ │ ├── util │ │ └── BCryptTestRunner.java │ │ ├── SampleTestRunner.java │ │ └── component │ │ └── SimpleHttpClient.java │ ├── docs │ └── images │ │ └── trino-gateway.jpg │ ├── chart │ ├── templates │ │ ├── _helpers.tpl │ │ ├── service.yaml │ │ ├── rest-ingress.yaml │ │ ├── proxy-ingress.yaml │ │ └── rbac.yaml │ └── Chart.yaml │ └── docker │ ├── run-trino-gateway.sh │ ├── create-db-schema.sh │ └── Dockerfile ├── .gitignore └── common └── src └── main └── java └── com └── cloudcheflabs └── dataroaster └── common └── util ├── StringUtils.java └── TemplateUtils.java /components/dbt/example/silver/analyses/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/dbt/example/silver/macros/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/components/dbt/example/silver/seeds/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/dbt/example/silver/tests/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/livy/chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/metabase/chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/dbt/example/silver/snapshots/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/test/java/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/hive/spark-thrift-server/src/test/java/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /components/hive/spark-thrift-server/src/main/resources/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/components/hive/spark-thrift-server/src/test/resources/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.profiles.active=@api.spring.profiles.active@ -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.profiles.active=@api.spring.profiles.active@ -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.profiles.active=@api.spring.profiles.active@ -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | spring.profiles.active=@api.spring.profiles.active@ -------------------------------------------------------------------------------- /components/metabase/docker/run-metabase.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | echo "starting metabase..."; 6 | 7 | java -jar metabase.jar -------------------------------------------------------------------------------- /components/mysql/chart/values.yaml: -------------------------------------------------------------------------------- 1 | storage: 2 | storageClass: standard 3 | size: 5Gi 4 | rootPassword: mysqlpass123 5 | dnsPolicy: ClusterFirst 
-------------------------------------------------------------------------------- /components/livy/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroasterlivy 3 | version: 1.0.0 4 | appVersion: 0.7.1 5 | description: dataroaster livy. 6 | 7 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/resources/application-prod.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8092 4 | servlet: 5 | context-path: / -------------------------------------------------------------------------------- /components/mysql/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroastermysql 3 | version: v1.1.0 4 | appVersion: 5.7 5 | description: dataroaster mysql. 6 | 7 | 8 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/resources/application-dev.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8092 4 | servlet: 5 | context-path: / 6 | 7 | -------------------------------------------------------------------------------- /components/metabase/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroastermetabase 3 | version: 1.1.0 4 | appVersion: v0.44.3 5 | description: dataroaster metabase. 
6 | 7 | -------------------------------------------------------------------------------- /operators/dataroaster/chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{- define "jdbc.url" -}}jdbc:mysql://mysql-service.{{ .Release.Namespace }}.svc:3306/dataroaster?useSSL=false{{- end -}} -------------------------------------------------------------------------------- /operators/helm/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v2 3 | name: dataroasterhelmoperator 4 | version: v3.0.0 5 | appVersion: v2.0.1 6 | description: dataroaster helm operator -------------------------------------------------------------------------------- /operators/trino/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v2 3 | name: dataroastertrinooperator 4 | version: v2.3.0 5 | appVersion: v3.1.1 6 | description: dataroaster trino operator -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .classpath 2 | .project 3 | .settings 4 | */.settings 5 | */.vertx 6 | target/ 7 | */target/** 8 | .idea 9 | *.iml 10 | *.lck 11 | *.log 12 | *.retry 13 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/templates/namespace.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Namespace 4 | metadata: 5 | name: {{ .Values.trino.operator.namespace }} 6 | -------------------------------------------------------------------------------- /components/redash/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroasterredash 3 | version: 2.2.0 4 | appVersion: 
10.0.0-beta.b49597 5 | description: dataroaster redash. 6 | 7 | 8 | -------------------------------------------------------------------------------- /operators/spark/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroastersparkoperator 3 | version: v1.2.0 4 | appVersion: v3.4.0 5 | description: dataroaster spark operator 6 | 7 | 8 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/docs/images/trino-gateway.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudcheflabs/dataroaster/HEAD/trino-ecosystem/trino-gateway/docs/images/trino-gateway.jpg -------------------------------------------------------------------------------- /components/hive/hive-metastore/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroaster-hivemetastore 3 | version: v2.2.0 4 | appVersion: v3.3.5 5 | description: dataroaster hivemetastore. 
6 | 7 | 8 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/docker/run-helm-operator.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | java \ 6 | -cp ./helm-operator-*-fat.jar \ 7 | com.cloudcheflabs.dataroaster.operators.helm.HelmOperator; -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/test/resources/gzip/amendments.txt.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudcheflabs/dataroaster/HEAD/trino-ecosystem/trino-gateway/src/test/resources/gzip/amendments.txt.gz -------------------------------------------------------------------------------- /operators/spark/spark-operator/docker/run-spark-operator.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | java \ 6 | -cp ./spark-operator-*-fat.jar \ 7 | com.cloudcheflabs.dataroaster.operators.spark.SparkOperator; -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/test/resources/memory-properties/memory.properties: -------------------------------------------------------------------------------- 1 | query.max-memory=16GB 2 | query.max-total-memory=19GB 3 | query.max-memory-per-node=5GB 4 | memory.heap-headroom-per-node=3GB -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/docs/images/dataroaster-architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudcheflabs/dataroaster/HEAD/operators/dataroaster/dataroaster-operator/docs/images/dataroaster-architecture.jpg -------------------------------------------------------------------------------- 
/components/metabase/chart/templates/secret.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: db-secrets 5 | namespace: {{ .Values.namespace }} 6 | type: Opaque 7 | data: 8 | PASSWORD: {{ .Values.db.password | b64enc }} -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/docs/images/trino-gateway-architecture-with-controller.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cloudcheflabs/dataroaster/HEAD/trino-ecosystem/trino-controller/docs/images/trino-gateway-architecture-with-controller.jpg -------------------------------------------------------------------------------- /components/airflow/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM apache/airflow:2.2.3 2 | 3 | USER airflow 4 | 5 | RUN pip install apache-airflow-providers-apache-livy==2.2.2 6 | RUN pip install apache-airflow-providers-amazon==3.2.0 7 | RUN pip install apache-airflow-providers-slack==4.2.3 -------------------------------------------------------------------------------- /operators/dataroaster/chart/templates/namespace.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Namespace 4 | metadata: 5 | name: {{ .Values.dependency.trino.namespace }} 6 | --- 7 | apiVersion: v1 8 | kind: Namespace 9 | metadata: 10 | name: {{ .Values.dependency.spark.namespace }} -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/dao/CacheDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.dao; 2 | 3 | public interface CacheDao { 4 | 5 | void 
set(String id, T t); 6 | T get(String id, Class clazz); 7 | } 8 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/spark-thrift-server/s3-secret.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: s3-secret 5 | namespace: {{ operatorNamespace }} 6 | type: Opaque 7 | data: 8 | accessKey: {{ accessKey }} 9 | secretKey: {{ secretKey }} -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/chart/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{- define "jdbc.url" -}}jdbc:mysql://mysql-service.{{ .Release.Namespace }}.svc:3306/trino_proxy?useSSL=false{{- end -}} 2 | 3 | {{- define "redisConnection.host" -}}{{ .Release.Namespace }}-redis-master.{{ .Release.Namespace }}.svc{{- end -}} 4 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/handler/ActionHandler.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.handler; 2 | 3 | public interface ActionHandler { 4 | void create(T t); 5 | void upgrade(T t); 6 | void destroy(T t); 7 | } 8 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/service/CacheService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.service; 2 | 3 | public interface CacheService { 4 | 5 | void set(String id, T t); 6 | T get(String id, Class clazz); 7 | } 8 | 
-------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/crd/HelmChartList.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.crd; 2 | 3 | import io.fabric8.kubernetes.client.CustomResourceList; 4 | 5 | public class HelmChartList extends CustomResourceList { 6 | } 7 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/handler/ActionHandler.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.handler; 2 | 3 | public interface ActionHandler { 4 | void create(T t); 5 | 6 | void update(T t); 7 | void destroy(T t); 8 | } 9 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/crd/TrinoClusterList.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.crd; 2 | 3 | import io.fabric8.kubernetes.client.CustomResourceList; 4 | 5 | public class TrinoClusterList extends CustomResourceList { 6 | } 7 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/dao/common/GenericDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common; 2 | 3 | import java.io.Serializable; 4 | 5 | public interface GenericDao extends Operations { 6 | // 7 | } 8 | -------------------------------------------------------------------------------- 
/operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/api/dao/ResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.api.dao; 2 | 3 | import java.util.Map; 4 | 5 | public interface ResourceDao { 6 | Map getSecret(String namespace, String secretName); 7 | } 8 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/api/dao/ResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.api.dao; 2 | 3 | import java.util.Map; 4 | 5 | public interface ResourceDao { 6 | Map getSecret(String namespace, String secretName); 7 | } 8 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/docker/kubernetes.repo: -------------------------------------------------------------------------------- 1 | [kubernetes] 2 | name=Kubernetes 3 | baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 4 | enabled=1 5 | gpgcheck=0 6 | repo_gpgcheck=0 7 | gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/crd/SparkApplicationList.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.crd; 2 | 3 | import io.fabric8.kubernetes.client.CustomResourceList; 4 | 5 | public class SparkApplicationList extends CustomResourceList { 6 | } 7 | -------------------------------------------------------------------------------- 
/operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/common/GenericDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common; 2 | 3 | import java.io.Serializable; 4 | 5 | public interface GenericDao extends Operations { 6 | // 7 | } 8 | -------------------------------------------------------------------------------- /components/dbt/example/silver/dbt_project.yml: -------------------------------------------------------------------------------- 1 | 2 | name: "silver" 3 | version: "1.0.0" 4 | config-version: 2 5 | 6 | profile: "trino" 7 | 8 | analysis-paths: ["analyses"] 9 | test-paths: ["tests"] 10 | macro-paths: ["macros"] 11 | snapshot-paths: ["snapshots"] 12 | 13 | target-path: "target" 14 | clean-targets: 15 | - "target" 16 | - "dbt_packages" 17 | -------------------------------------------------------------------------------- /components/livy/chart/templates/pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | name: livy-log-pvc 5 | namespace: {{ .Values.namespace }} 6 | spec: 7 | accessModes: 8 | - ReadWriteMany 9 | resources: 10 | requests: 11 | storage: {{ .Values.livy.storageSize }} 12 | storageClassName: {{ .Values.livy.storageClass }} -------------------------------------------------------------------------------- /components/hive/hive-metastore/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: hive-metastore 2 | image: cloudcheflabs/hivemetastore:v3.3.5 3 | dnsPolicy: ClusterFirst 4 | replicas: 1 5 | s3: 6 | bucket: any-bucket 7 | accessKey: any-access-key 8 | secretKey: any-secret 9 | endpoint: any-endpoint 10 | region: any-region 11 | jdbc: 12 | user: root 13 | password: mysqlpass123 
-------------------------------------------------------------------------------- /components/hive/spark-thrift-server/bin/spark-thrift-server-service.yaml: -------------------------------------------------------------------------------- 1 | kind: Service 2 | apiVersion: v1 3 | metadata: 4 | name: spark-thrift-server-service 5 | namespace: spark 6 | spec: 7 | type: LoadBalancer 8 | selector: 9 | spark-role: driver 10 | ports: 11 | - name: jdbc-port 12 | port: 10016 13 | protocol: TCP 14 | targetPort: 10016 -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/dao/ClusterJmxDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.dao; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.domain.RestResponse; 4 | 5 | public interface ClusterJmxDao { 6 | RestResponse listClusterJmxEndpoints(String namespace, String restUri); 7 | } 8 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/dao/UsersDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.dao; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Users; 5 | 6 | public interface UsersDao extends Operations { 7 | } 8 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/dao/ClusterDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.dao; 2 | 3 | import 
com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Cluster; 5 | 6 | public interface ClusterDao extends Operations { 7 | } 8 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/service/ClusterJmxService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.domain.RestResponse; 4 | 5 | public interface ClusterJmxService { 6 | RestResponse listClusterJmxEndpoints(String namespace, String restUri); 7 | } 8 | -------------------------------------------------------------------------------- /components/dbt/example/silver/models/hive_to_iceberg.sql: -------------------------------------------------------------------------------- 1 | {{ 2 | config( 3 | pre_hook = "set session query_max_run_time='10m'", 4 | materialized = "incremental", 5 | incremental_strategy = "append", 6 | on_table_exists = "drop", 7 | format = "ORC", 8 | using = "ICEBERG" 9 | ) 10 | }} 11 | SELECT 12 | baseproperties.eventtype, 13 | itemid, 14 | price 15 | FROM hive.default.test_parquet -------------------------------------------------------------------------------- /operators/dataroaster/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: dataroaster-operator-service 6 | labels: 7 | app: dataroaster-operator 8 | spec: 9 | type: ClusterIP 10 | ports: 11 | - port: {{ .Values.server.port }} 12 | targetPort: rest 13 | protocol: TCP 14 | name: rest 15 | selector: 16 | app: dataroaster-operator 17 | -------------------------------------------------------------------------------- 
/operators/trino/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: trino-operator-service 5 | namespace: {{ .Values.namespace }} 6 | labels: 7 | app: trino-operator 8 | spec: 9 | type: ClusterIP 10 | ports: 11 | - port: {{ .Values.server.port }} 12 | targetPort: http 13 | protocol: TCP 14 | name: http 15 | selector: 16 | app: trino-operator -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/service/UsersService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Users; 5 | 6 | public interface UsersService extends Operations { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /components/livy/docker/run-livy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | echo "adding token to kubeconfig..."; 6 | TOKEN=$(cat /var/run/secrets/kubernetes.io/serviceaccount/token); 7 | REPLACE="s//${TOKEN}/g"; 8 | echo "$REPLACE"; 9 | sed -i $REPLACE /opt/livy/.kube/config; 10 | cat /opt/livy/.kube/config; 11 | 12 | echo "starting livy..."; 13 | bin/livy-server start; 14 | tail -f logs/livy-*-server.out; -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/handler/ActionHandler.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.handler; 2 | 3 | import 
com.cloudcheflabs.dataroaster.operators.spark.crd.SparkApplication; 4 | 5 | public interface ActionHandler { 6 | void submit(SparkApplication sparkApplication); 7 | void destroy(SparkApplication sparkApplication); 8 | } 9 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/dao/ClusterGroupDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.dao; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.ClusterGroup; 5 | 6 | public interface ClusterGroupDao extends Operations { 7 | } 8 | -------------------------------------------------------------------------------- /components/livy/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: livy-service 6 | namespace: {{ .Values.namespace }} 7 | labels: 8 | app: livy 9 | component: dataroaster 10 | spec: 11 | type: ClusterIP 12 | ports: 13 | - port: {{ .Values.server.port }} 14 | targetPort: rest 15 | protocol: TCP 16 | name: rest 17 | selector: 18 | app: livy -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/service/ClusterService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Cluster; 5 | 6 | public interface ClusterService extends Operations { 7 | 8 | } 9 | 
-------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/spark-thrift-server/spark-thrift-server-pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | name: {{ pvcName }} 5 | namespace: {{ namespace }} 6 | labels: {} 7 | annotations: {} 8 | spec: 9 | accessModes: 10 | - ReadWriteMany 11 | resources: 12 | requests: 13 | storage: {{ size }}Gi 14 | storageClassName: nfs 15 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/spark-thrift-server/spark-thrift-server-service.yaml: -------------------------------------------------------------------------------- 1 | kind: Service 2 | apiVersion: v1 3 | metadata: 4 | name: spark-thrift-server-service 5 | namespace: {{ namespace }} 6 | spec: 7 | type: ClusterIP 8 | selector: 9 | spark-role: driver 10 | ports: 11 | - name: jdbc-port 12 | port: 10016 13 | protocol: TCP 14 | targetPort: 10016 -------------------------------------------------------------------------------- /operators/helm/chart/values.yaml: -------------------------------------------------------------------------------- 1 | serviceAccount: helm-operator 2 | image: cloudcheflabs/helm-operator:v2.0.1 3 | imagePullPolicy: Always 4 | resources: 5 | requests: 6 | cpu: 200m 7 | memory: 1Gi 8 | limits: 9 | cpu: 300m 10 | memory: 1500Mi 11 | priorityClassName: "" 12 | annotations: null 13 | affinity: null 14 | tolerations: null 15 | nodeSelector: null 16 | hostNetwork: false 17 | dnsPolicy: ClusterFirst 18 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v2 3 | name: 
dataroaster-trino-gateway 4 | version: v1.7.1 5 | appVersion: v2.4.0 6 | description: dataroaster trino gateway 7 | 8 | dependencies: 9 | - name: dataroastermysql 10 | version: v1.1.0 11 | repository: https://cloudcheflabs.github.io/mysql-helm-repo/ 12 | - name: redis 13 | version: 16.13.2 14 | repository: https://charts.bitnami.com/bitnami -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/service/TrinoActiveQueryCountRestService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.TrinoActiveQueryCount; 4 | 5 | public interface TrinoActiveQueryCountRestService { 6 | 7 | TrinoActiveQueryCount getTrinoActiveQueryCount(String clusterName); 8 | } 9 | -------------------------------------------------------------------------------- /components/metabase/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: metabase-service 6 | namespace: {{ .Values.namespace }} 7 | labels: 8 | app: metabase 9 | component: dataroaster 10 | spec: 11 | type: ClusterIP 12 | ports: 13 | - port: {{ .Values.server.port }} 14 | targetPort: ui 15 | protocol: TCP 16 | name: ui 17 | selector: 18 | app: metabase -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/util/PauseUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.util; 2 | 3 | public class PauseUtils { 4 | 5 | public static void pause(long milliSeconds) { 6 | try { 7 | Thread.sleep(milliSeconds); 8 | } catch 
(Exception e) { 9 | e.printStackTrace(); 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/templates/cr/cert-manager.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: cert-manager 5 | namespace: {{ customResourceNamespace }} 6 | spec: 7 | repo: https://charts.jetstack.io 8 | chartName: cert-manager 9 | name: cert-manager 10 | version: v1.5.3 11 | namespace: cert-manager 12 | values: | 13 | installCRDs: true -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/util/RandomUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.util; 2 | 3 | import java.util.List; 4 | import java.util.Random; 5 | 6 | public class RandomUtils { 7 | 8 | public static T randomize(List list) { 9 | Random rand = new Random(); 10 | return list.get(rand.nextInt(list.size())); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/UsersDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.Users; 6 | 7 | public interface UsersDao extends Operations { 8 | } 9 | -------------------------------------------------------------------------------- 
/trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/service/ClusterGroupService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.ClusterGroup; 5 | 6 | public interface ClusterGroupService extends Operations<ClusterGroup> { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/Roles.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | public enum Roles { 4 | ROLE_PLATFORM_ADMIN(1000), ROLE_USER(10); 5 | 6 | private int level; 7 | 8 | private Roles(int level) { 9 | this.level = level; 10 | } 11 | 12 | public int getLevel() { 13 | return this.level; 14 | } 15 | } -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/resources/cr/spark-operator.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: spark-operator 5 | namespace: helm-operator 6 | spec: 7 | repo: https://cloudcheflabs.github.io/helm-repository/ 8 | chartName: dataroaster-spark-operator 9 | name: spark-operator 10 | version: v1.0.0 11 | namespace: spark-operator 12 | 13 | 14 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/resources/cr/trino-operator.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: 
"helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: trino-operator 5 | namespace: helm-operator 6 | spec: 7 | repo: https://cloudcheflabs.github.io/trino-helm-repo/ 8 | chartName: dataroaster-trino-operator 9 | name: trino-operator 10 | version: v1.0.0 11 | namespace: trino-operator 12 | 13 | 14 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/domain/Roles.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.domain; 2 | 3 | public enum Roles { 4 | ROLE_PLATFORM_ADMIN(1000), ROLE_USER(10); 5 | 6 | private int level; 7 | 8 | private Roles(int level) { 9 | this.level = level; 10 | } 11 | 12 | public int getLevel() { 13 | return this.level; 14 | } 15 | } -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/templates/cr/prod-issuer.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: cert-manager.io/v1 2 | kind: ClusterIssuer 3 | metadata: 4 | name: letsencrypt-prod 5 | spec: 6 | acme: 7 | server: https://acme-v02.api.letsencrypt.org/directory 8 | email: mykidong@gmail.com 9 | privateKeySecretRef: 10 | name: letsencrypt-prod 11 | solvers: 12 | - http01: 13 | ingress: 14 | class: nginx -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/test/resources/memory-properties/config.properties: -------------------------------------------------------------------------------- 1 | coordinator=false 2 | http-server.http.port=8080 3 | query.max-memory=20GB 4 | query.max-total-memory=25GB 5 | query.max-memory-per-node=8GB 6 | memory.heap-headroom-per-node=4GB 7 | discovery.uri=http://trino-coordinator-service.trino-etl.svc:8080 8 | 
jmx.rmiregistry.port=9080 9 | jmx.rmiserver.port=9081 10 | retry-policy=TASK 11 | query.hash-partition-count=50 -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: Service 4 | metadata: 5 | name: trino-controller-service 6 | namespace: {{ .Values.namespace }} 7 | labels: 8 | app: trino-controller 9 | spec: 10 | type: ClusterIP 11 | ports: 12 | - port: {{ .Values.server.port }} 13 | targetPort: rest 14 | protocol: TCP 15 | name: rest 16 | selector: 17 | app: trino-controller 18 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/domain/Roles.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.domain; 2 | 3 | public enum Roles { 4 | ROLE_PLATFORM_ADMIN(1000), ROLE_USER(10); 5 | 6 | private int level; 7 | 8 | private Roles(int level) { 9 | this.level = level; 10 | } 11 | 12 | public int getLevel() { 13 | return this.level; 14 | } 15 | } -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/UserTokenDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.UserToken; 6 | 7 | public interface UserTokenDao extends Operations { 8 | } 9 | -------------------------------------------------------------------------------- 
/operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/service/UsersService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.Users; 5 | 6 | public interface UsersService extends Operations { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/ComponentsDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.Components; 6 | 7 | public interface ComponentsDao extends Operations { 8 | } 9 | -------------------------------------------------------------------------------- /components/redash/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: redash 2 | image: redash/redash:10.0.0-beta.b49597 3 | dnsPolicy: ClusterFirst 4 | storage: 5 | storageClass: oci 6 | size: 10 7 | service: 8 | type: ClusterIP 9 | 10 | server: 11 | port: 5000 12 | 13 | # ingress. 14 | ingress: 15 | enabled: false 16 | ingressClassName: nginx 17 | hostName: dataroaster-redash.cloudchef-labs.com 18 | 19 | # cert-manager. 
20 | certManager: 21 | clusterIssue: letsencrypt-prod -------------------------------------------------------------------------------- /operators/spark/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: spark-operator 2 | serviceAccount: spark-operator 3 | image: cloudcheflabs/spark-operator:v3.4.0 4 | imagePullPolicy: Always 5 | resources: 6 | requests: 7 | cpu: 200m 8 | memory: 1Gi 9 | limits: 10 | cpu: 300m 11 | memory: 1500Mi 12 | priorityClassName: "" 13 | annotations: null 14 | affinity: null 15 | tolerations: null 16 | nodeSelector: null 17 | hostNetwork: false 18 | dnsPolicy: ClusterFirst 19 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/service/UserTokenService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.UserToken; 5 | 6 | public interface UserTokenService extends Operations { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/domain/Roles.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.domain; 2 | 3 | public enum Roles { 4 | ROLE_PLATFORM_ADMIN(1000), ROLE_USER(10); 5 | 6 | private int level; 7 | 8 | private Roles(int level) { 9 | this.level = level; 10 | } 11 | 12 | public int getLevel() { 13 | return this.level; 14 | } 15 | } -------------------------------------------------------------------------------- 
/operators/helm/helm-operator/src/test/resources/cr/helm-nginx.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: ingress-nginx 5 | namespace: helm-operator 6 | spec: 7 | repo: https://kubernetes.github.io/ingress-nginx 8 | chartName: ingress-nginx 9 | name: ingress-nginx 10 | version: 4.0.17 11 | namespace: ingress-nginx 12 | values: | 13 | replicaCount: 1 14 | minAvailable: 1 15 | 16 | 17 | -------------------------------------------------------------------------------- /operators/trino/chart/templates/cm.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: trino-operator 5 | namespace: {{ .Values.namespace }} 6 | labels: 7 | app: trino-operator 8 | data: 9 | application.properties: | 10 | spring.profiles.active=@api.spring.profiles.active@ 11 | application-prod.yml: | 12 | # spring boot server 13 | server: 14 | port: {{ .Values.server.port }} 15 | servlet: 16 | context-path: / 17 | -------------------------------------------------------------------------------- /components/dbt/example/profiles.yml: -------------------------------------------------------------------------------- 1 | trino: 2 | target: dev 3 | outputs: 4 | dev: 5 | type: trino 6 | method: ldap 7 | user: trino 8 | password: trino123 9 | host: trino-gateway-proxy-test.cloudchef-labs.com 10 | port: 443 11 | database: iceberg 12 | schema: silver 13 | threads: 8 14 | http_scheme: https 15 | session_properties: 16 | query_max_run_time: 5d 17 | exchange_compression: True 18 | -------------------------------------------------------------------------------- /components/hive/hive-metastore/cr/hive-metastore-mysql.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 
3 | metadata: 4 | name: hive-metastore-mysql 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://cloudcheflabs.github.io/mysql-helm-repo/ 8 | chartName: dataroaster-mysql 9 | name: mysql 10 | version: v1.0.1 11 | namespace: hive-metastore 12 | values: | 13 | storage: 14 | storageClass: oci 15 | size: 10Gi -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/service/ComponentsService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.Components; 5 | 6 | public interface ComponentsService extends Operations { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/docker/run-trino-gateway.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | # run trino gateway spring boot application. 
6 | java \ 7 | -cp trino-gateway-*.jar \ 8 | -Dloader.path=/opt/trino-gateway/ \ 9 | -Dspring.config.location=file:///opt/trino-gateway/conf/application.properties \ 10 | -Dspring.config.additional-location=file:///opt/trino-gateway/conf/application-prod.yml \ 11 | -Dspring.profiles.active=prod \ 12 | org.springframework.boot.loader.PropertiesLauncher -------------------------------------------------------------------------------- /components/redash/cr/helm-redash.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: redash 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://cloudcheflabs.github.io/redash-helm-repo/ 8 | chartName: dataroasterredash 9 | name: redash 10 | version: v2.0.0 11 | namespace: redash 12 | values: | 13 | storage: 14 | storageClass: oci 15 | size: 2 16 | service: 17 | type: LoadBalancer -------------------------------------------------------------------------------- /operators/trino/trino-operator/docker/run-trino-operator.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | # run trino operator spring boot application. 
6 | java \ 7 | -cp trino-operator-*.jar \ 8 | -Dloader.path=/opt/trino-operator/ \ 9 | -Dspring.config.location=file:///opt/trino-operator/conf/application.properties \ 10 | -Dspring.config.additional-location=file:///opt/trino-operator/conf/application-prod.yml \ 11 | -Dspring.profiles.active=prod \ 12 | org.springframework.boot.loader.PropertiesLauncher -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/test/resources/k8s-manifests/deploy-nginx.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: nginx 5 | labels: 6 | app: nginx 7 | spec: 8 | selector: 9 | matchLabels: 10 | app: nginx 11 | template: 12 | metadata: 13 | labels: 14 | app: nginx 15 | spec: 16 | containers: 17 | - image: nginx 18 | name: nginx 19 | ports: 20 | - containerPort: {{ port }} -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/service/CustomResourceService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.CustomResource; 5 | 6 | public interface CustomResourceService extends Operations<CustomResource> { 7 | 8 | } 9 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/application-prod.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8089 4 | servlet: 5 | context-path: / 6 | # jdbc 7 | jdbc: 8 | driverClassName: com.mysql.jdbc.Driver 9 | url: 
jdbc:mysql://localhost:3306/dataroaster?useSSL=false 10 | user: admin 11 | pass: Admin123! 12 | 13 | # hibernate 14 | hibernate: 15 | dialect: org.hibernate.dialect.MySQL5Dialect 16 | show_sql: false 17 | globally_quoted_identifiers: true -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/dao/AbstractKubernetesResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.dao; 2 | 3 | import io.fabric8.kubernetes.client.KubernetesClient; 4 | 5 | public abstract class AbstractKubernetesResourceDao { 6 | 7 | protected KubernetesClient client; 8 | 9 | public AbstractKubernetesResourceDao(KubernetesClient client) { 10 | this.client = client; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/templates/cr/nginx-ingress-controller.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: ingress-nginx 5 | namespace: {{ customResourceNamespace }} 6 | spec: 7 | repo: https://kubernetes.github.io/ingress-nginx 8 | chartName: ingress-nginx 9 | name: ingress-nginx 10 | version: 4.0.17 11 | namespace: ingress-nginx 12 | values: | 13 | controller: 14 | replicaCount: 2 -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/application-dev.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8089 4 | servlet: 5 | context-path: / 6 | # jdbc 7 | jdbc: 8 | driverClassName: com.mysql.jdbc.Driver 9 | url: jdbc:mysql://localhost:3306/dataroaster?useSSL=false 10 | user: admin 11 
| pass: Admin123! 12 | 13 | # hibernate 14 | hibernate: 15 | dialect: org.hibernate.dialect.MySQL5Dialect 16 | show_sql: true 17 | globally_quoted_identifiers: true 18 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | 2 | apiVersion: v2 3 | name: dataroaster-trino-controller 4 | version: v1.6.1 5 | appVersion: v1.2.0 6 | description: dataroaster trino controller 7 | 8 | dependencies: 9 | - name: dataroasterhelmoperator 10 | version: v3.0.0 11 | repository: https://cloudcheflabs.github.io/helm-operator-helm-repo/ 12 | - name: dataroastertrinooperator 13 | version: v2.3.0 14 | repository: https://cloudcheflabs.github.io/trino-helm-repo/ 15 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/templates/cr/grafana.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: grafana 5 | namespace: {{ customResourceNamespace }} 6 | spec: 7 | repo: https://grafana.github.io/helm-charts 8 | chartName: grafana 9 | name: grafana 10 | version: 6.32.1 11 | namespace: grafana 12 | values: | 13 | persistence: 14 | enabled: true 15 | storageClassName: {{ storageClass }} 16 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/docker/run-trino-controller.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | # run trino controller spring boot application. 
6 | java \ 7 | -cp trino-controller-*.jar \ 8 | -Dloader.path=/opt/trino-controller/ \ 9 | -Dspring.config.location=file:///opt/trino-controller/conf/application.properties \ 10 | -Dspring.config.location=file:///opt/trino-controller/conf/application-prod.yml \ 11 | -Dspring.profiles.active=prod \ 12 | org.springframework.boot.loader.PropertiesLauncher -------------------------------------------------------------------------------- /components/livy/docker/config: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Config 3 | clusters: 4 | - name: default-cluster 5 | cluster: 6 | certificate-authority: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt 7 | server: https://kubernetes.default.svc 8 | contexts: 9 | - name: default-context 10 | context: 11 | cluster: default-cluster 12 | namespace: default 13 | user: default-user 14 | current-context: default-context 15 | users: 16 | - name: default-user 17 | user: 18 | token: -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/resources/application-test.properties: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server.port=8089 3 | server.servlet.context-path=/ 4 | 5 | # jdbc 6 | jdbc.driverClassName=com.mysql.jdbc.Driver 7 | jdbc.url=jdbc:mysql://localhost:3306/dataroaster?useSSL=false 8 | jdbc.user=admin 9 | jdbc.pass=Admin123! 
10 | 11 | # hibernate 12 | hibernate.dialect=org.hibernate.dialect.MySQL5Dialect 13 | hibernate.show_sql=true 14 | hibernate.globally_quoted_identifiers=true 15 | 16 | 17 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/test/resources/cr/helm-mysql.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: dataroaster-trino-gateway-mysql 5 | namespace: helm-operator 6 | spec: 7 | repo: https://cloudcheflabs.github.io/mysql-helm-repo/ 8 | chartName: dataroaster-trino-gateway-mysql 9 | name: mysql 10 | version: v1.0.0 11 | namespace: trino-gateway 12 | values: | 13 | storage: 14 | storageClass: standard 15 | 16 | 17 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/dao/kubernetes/AbstractKubernetesResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.dao.kubernetes; 2 | 3 | import io.fabric8.kubernetes.client.KubernetesClient; 4 | 5 | public abstract class AbstractKubernetesResourceDao { 6 | 7 | protected KubernetesClient client; 8 | 9 | public AbstractKubernetesResourceDao(KubernetesClient client) { 10 | this.client = client; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /common/src/main/java/com/cloudcheflabs/dataroaster/common/util/StringUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.common.util; 2 | 3 | import java.util.Base64; 4 | 5 | public class StringUtils { 6 | 7 | public static String base64Encode(String string) { 8 | return new String(Base64.getEncoder().encode(string.getBytes())); 9 | } 10 | 11 | 
public static String base64Decode(String string) { 12 | return new String(Base64.getDecoder().decode(string.getBytes())); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /operators/trino/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: trino-operator 2 | serviceAccount: trino-operator 3 | image: cloudcheflabs/trino-operator:v3.1.1 4 | imagePullPolicy: Always 5 | replicas: 3 6 | resources: 7 | requests: 8 | cpu: 200m 9 | memory: 1500Mi 10 | limits: 11 | cpu: 500m 12 | memory: 2500Mi 13 | priorityClassName: "" 14 | annotations: null 15 | affinity: null 16 | tolerations: null 17 | nodeSelector: null 18 | hostNetwork: false 19 | dnsPolicy: ClusterFirst 20 | server: 21 | port: 8092 22 | -------------------------------------------------------------------------------- /components/nfs/cr/nfs.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: nfs 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://kubernetes-sigs.github.io/nfs-ganesha-server-and-external-provisioner/ 8 | chartName: nfs-server-provisioner 9 | name: nfs 10 | version: 1.4.0 11 | namespace: nfs 12 | values: | 13 | replicaCount: 1 14 | namespace: nfs 15 | persistence: 16 | enabled: true 17 | size: 10Gi 18 | storageClass: oci -------------------------------------------------------------------------------- /components/dbt/example/silver/models/example.sql: -------------------------------------------------------------------------------- 1 | {{ 2 | config( 3 | pre_hook = "set session query_max_run_time='10m'", 4 | materialized = "incremental", 5 | on_table_exists = "drop", 6 | unique_key = "itemid", 7 | incremental_strategy = "delete+insert", 8 | format = "ORC", 9 | properties = { 10 | "partitioning": "ARRAY['itemid']" 11 | }, 12 | using = "ICEBERG" 13 
| ) 14 | }} 15 | SELECT 16 | baseproperties.eventtype, 17 | itemid, 18 | price 19 | FROM iceberg.iceberg_db.test_iceberg -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/docker/run-dataroaster-operator.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | # run dataroaster operator spring boot application. 6 | java \ 7 | -cp dataroaster-operator-*.jar \ 8 | -Dloader.path=/opt/dataroaster-operator/ \ 9 | -Dspring.config.location=file:///opt/dataroaster-operator/conf/application.properties \ 10 | -Dspring.config.location=file:///opt/dataroaster-operator/conf/application-prod.yml \ 11 | -Dspring.profiles.active=prod \ 12 | org.springframework.boot.loader.PropertiesLauncher -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/dao/common/AbstractKubernetesDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.dao.common; 2 | 3 | import io.fabric8.kubernetes.client.KubernetesClient; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.stereotype.Repository; 6 | 7 | @Repository 8 | public abstract class AbstractKubernetesDao { 9 | 10 | @Autowired 11 | protected KubernetesClient kubernetesClient; 12 | } 13 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/application-prod.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8093 4 | servlet: 5 | context-path: / 6 | trino: 7 | gateway: 8 | publicEndpoint: "https://trino-gateway-proxy-test.cloudchef-labs.com" 9 | proxyHostName: 
trino-gateway-proxy-test.cloudchef-labs.com 10 | restHostName: trino-gateway-rest-test.cloudchef-labs.com 11 | storageClass: @trino.gateway.storageClass@ 12 | restUri: "http://localhost:8099" 13 | operator: 14 | restUri: "http://localhost:8092" -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/dao/common/AbstractKubernetesDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.dao.common; 2 | 3 | import io.fabric8.kubernetes.client.KubernetesClient; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.stereotype.Repository; 6 | 7 | @Repository 8 | public abstract class AbstractKubernetesDao { 9 | 10 | @Autowired 11 | protected KubernetesClient kubernetesClient; 12 | } 13 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/hive-metastore/hive-metastore-mysql.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: hive-metastore-mysql 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://cloudcheflabs.github.io/mysql-helm-repo/ 8 | chartName: dataroastermysql 9 | name: mysql 10 | version: v1.0.2 11 | namespace: {{ namespace }} 12 | values: | 13 | storage: 14 | storageClass: {{ storageClass }} 15 | size: {{ size }}Gi -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/application-dev.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8093 4 | servlet: 5 | context-path: / 6 | trino: 7 
| gateway: 8 | publicEndpoint: "https://trino-gateway-proxy-test.cloudchef-labs.com" 9 | proxyHostName: trino-gateway-proxy-test.cloudchef-labs.com 10 | restHostName: trino-gateway-rest-test.cloudchef-labs.com 11 | storageClass: @trino.gateway.storageClass@ 12 | restUri: "http://localhost:8099" 13 | operator: 14 | restUri: "http://localhost:8092" 15 | 16 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/spark-thrift-server/cluster-role.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: ClusterRole 4 | metadata: 5 | annotations: 6 | rbac.authorization.kubernetes.io/autoupdate: "true" 7 | labels: 8 | kubernetes.io/bootstrapping: rbac-defaults 9 | name: {{ namespace }}-role 10 | rules: 11 | - apiGroups: 12 | - '*' 13 | resources: 14 | - '*' 15 | verbs: 16 | - '*' 17 | - nonResourceURLs: 18 | - '*' 19 | verbs: 20 | - '*' -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/test/resources/cr/helm-nfs.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: nfs 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://kubernetes-sigs.github.io/nfs-ganesha-server-and-external-provisioner/ 8 | chartName: nfs-server-provisioner 9 | name: nfs 10 | version: 1.4.0 11 | namespace: nfs 12 | values: | 13 | replicaCount: 1 14 | namespace: nfs 15 | persistence: 16 | enabled: true 17 | size: 10Gi 18 | storageClass: oci -------------------------------------------------------------------------------- /components/hive/spark-thrift-server/src/main/java/com/cloudcheflabs/dataroaster/hive/SparkThriftServerRunner.java: 
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.hive; 2 | 3 | public class SparkThriftServerRunner { 4 | 5 | public static void main(String[] args) { 6 | org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(args); 7 | 8 | while (true) { 9 | try { 10 | Thread.sleep(Long.MAX_VALUE); 11 | } catch (Exception e) { 12 | e.printStackTrace(); 13 | } 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/CustomResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.CustomResource; 6 | 7 | public interface CustomResourceDao extends Operations { 8 | 9 | CustomResource findCustomResource(String name, String namespace, String kind); 10 | } 11 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/K8sResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.CustomResource; 4 | 5 | public interface K8sResourceDao { 6 | 7 | void createCustomResource(CustomResource customResource); 8 | 9 | void deleteCustomResource(String name, String namespace, String kind); 10 | 11 | void updateCustomResource(CustomResource customResource); 12 | } 13 | 
-------------------------------------------------------------------------------- /components/kafka/cr/helm-kafka.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: kafka 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://charts.bitnami.com/bitnami 8 | chartName: kafka 9 | name: kafka 10 | version: 18.0.0 11 | namespace: kafka 12 | values: | 13 | replicaCount: 3 14 | persistence: 15 | enabled: true 16 | size: 8Gi 17 | storageClass: oci 18 | zookeeper: 19 | replicaCount: 3 20 | persistence: 21 | enabled: true 22 | storageClass: oci -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/api/dao/common/Operations.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | public interface Operations { 7 | 8 | T findOne(final String id); 9 | 10 | List findAll(); 11 | 12 | void create(final T entity); 13 | 14 | T update(final T entity); 15 | 16 | void delete(final T entity); 17 | 18 | void deleteById(final String entityId); 19 | 20 | } 21 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/Privileges.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | public class Privileges implements Serializable{ 7 | 8 | private List roleList; 9 | 10 | public Privileges() {} 11 | public Privileges(List roleList) { 12 | this.roleList = roleList; 13 | } 14 | 15 | 
16 | public List getRoleList() { 17 | return this.roleList; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/util/Base64Utils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.util; 2 | 3 | import java.util.Base64; 4 | 5 | public class Base64Utils { 6 | 7 | public static String encodeBase64(String input) { 8 | return Base64.getEncoder().encodeToString(input.getBytes()); 9 | } 10 | 11 | public static String decodeBase64(String encoded) { 12 | byte[] decodedBytes = Base64.getDecoder().decode(encoded); 13 | return new String(decodedBytes); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/resources/templates/kubeconfig/config: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Config 3 | clusters: 4 | - name: helm-operator-cluster 5 | cluster: 6 | certificate-authority: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt 7 | server: https://kubernetes.default.svc 8 | contexts: 9 | - name: helm-operator-context 10 | context: 11 | cluster: helm-operator-cluster 12 | namespace: default 13 | user: helm-operator-user 14 | current-context: helm-operator-context 15 | users: 16 | - name: helm-operator-user 17 | user: 18 | token: {{ token }} -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/domain/Privileges.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.domain; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | public class Privileges implements Serializable{ 7 | 
8 | private List roleList; 9 | 10 | public Privileges() {} 11 | public Privileges(List roleList) { 12 | this.roleList = roleList; 13 | } 14 | 15 | 16 | public List getRoleList() { 17 | return this.roleList; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/util/Base64Utils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.util; 2 | 3 | import java.util.Base64; 4 | 5 | public class Base64Utils { 6 | 7 | public static String encodeBase64(String input) { 8 | return Base64.getEncoder().encodeToString(input.getBytes()); 9 | } 10 | 11 | public static String decodeBase64(String encoded) { 12 | byte[] decodedBytes = Base64.getDecoder().decode(encoded); 13 | return new String(decodedBytes); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/service/K8sResourceService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.CustomResource; 4 | 5 | public interface K8sResourceService { 6 | 7 | void createCustomResource(CustomResource customResource); 8 | 9 | void deleteCustomResource(String name, String namespace, String kind); 10 | 11 | void updateCustomResource(CustomResource customResource); 12 | } 13 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | %d %highlight(%level) [%blue(%t)] 
%yellow(%C{1.}): %msg%n%throwable 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | %d %highlight(%level) [%blue(%t)] %yellow(%C{1.}): %msg%n%throwable 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/resources/templates/kubeconfig/config: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Config 3 | clusters: 4 | - name: spark-operator-cluster 5 | cluster: 6 | certificate-authority: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt 7 | server: https://kubernetes.default.svc 8 | contexts: 9 | - name: spark-operator-context 10 | context: 11 | cluster: spark-operator-cluster 12 | namespace: default 13 | user: spark-operator-user 14 | current-context: spark-operator-context 15 | users: 16 | - name: spark-operator-user 17 | user: 18 | token: {{ token }} -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | %d %highlight(%level) [%blue(%t)] %yellow(%C{1.}): %msg%n%throwable 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/domain/Privileges.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.domain; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | public class Privileges implements 
Serializable{ 7 | 8 | private List roleList; 9 | 10 | public Privileges() {} 11 | public Privileges(List roleList) { 12 | this.roleList = roleList; 13 | } 14 | 15 | 16 | public List getRoleList() { 17 | return this.roleList; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/util/Base64Utils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.util; 2 | 3 | import java.util.Base64; 4 | 5 | public class Base64Utils { 6 | 7 | public static String encodeBase64(String input) { 8 | return Base64.getEncoder().encodeToString(input.getBytes()); 9 | } 10 | 11 | public static String decodeBase64(String encoded) { 12 | byte[] decodedBytes = Base64.getDecoder().decode(encoded); 13 | return new String(decodedBytes); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | %d %highlight(%level) [%blue(%t)] %yellow(%C{1.}): %msg%n%throwable 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/nfs/nfs.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: nfs 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://kubernetes-sigs.github.io/nfs-ganesha-server-and-external-provisioner/ 8 | chartName: nfs-server-provisioner 9 | name: nfs 10 | version: 1.4.0 11 | namespace: nfs 12 | values: | 13 | replicaCount: 1 14 | namespace: nfs 15 | persistence: 16 | 
enabled: true 17 | size: {{ size }}Gi 18 | storageClass: {{ storageClass }} -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/api/dao/common/Operations.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | public interface Operations { 7 | 8 | T findOne(final String id); 9 | 10 | List findAll(); 11 | 12 | void create(final T entity); 13 | 14 | T update(final T entity); 15 | 16 | void delete(final T entity); 17 | 18 | void deleteById(final String entityId); 19 | 20 | } 21 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/resources/templates/spark-thrift-server/cluster-rolebinding.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: rbac.authorization.k8s.io/v1 3 | kind: ClusterRoleBinding 4 | metadata: 5 | annotations: 6 | rbac.authorization.kubernetes.io/autoupdate: "true" 7 | labels: 8 | kubernetes.io/bootstrapping: rbac-defaults 9 | name: {{ namespace }}-rolebinding 10 | roleRef: 11 | apiGroup: rbac.authorization.k8s.io 12 | kind: ClusterRole 13 | name: {{ namespace }}-role 14 | subjects: 15 | - kind: ServiceAccount 16 | name: {{ namespace }} 17 | namespace: {{ namespace }} -------------------------------------------------------------------------------- /components/hive/hive-metastore/cr/hive-metastore.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: hive-metastore 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: 
https://cloudcheflabs.github.io/hive-metastore-helm-repo/ 8 | chartName: dataroaster-hivemetastore 9 | name: hive-metastore 10 | version: v2.0.0 11 | namespace: hive-metastore 12 | values: | 13 | image: cloudcheflabs/hivemetastore:v3.0.0 14 | s3: 15 | bucket: mykidong 16 | accessKey: 17 | secretKey: 18 | endpoint: -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/domain/Privileges.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.domain; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | public class Privileges implements Serializable{ 7 | 8 | private List roleList; 9 | 10 | public Privileges() {} 11 | public Privileges(List roleList) { 12 | this.roleList = roleList; 13 | } 14 | 15 | 16 | public List getRoleList() { 17 | return this.roleList; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/Base64Utils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import java.util.Base64; 4 | 5 | public class Base64Utils { 6 | 7 | public static String encodeBase64(String input) { 8 | return Base64.getEncoder().encodeToString(input.getBytes()); 9 | } 10 | 11 | public static String decodeBase64(String encoded) { 12 | byte[] decodedBytes = Base64.getDecoder().decode(encoded); 13 | return new String(decodedBytes); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/redis/RedisTrinoResponseCacheDao.java: 
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.dao.redis; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.dao.common.AbstractCacheDao; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.TrinoResponse; 5 | import org.springframework.stereotype.Repository; 6 | 7 | @Repository 8 | public class RedisTrinoResponseCacheDao extends AbstractCacheDao { 9 | 10 | public RedisTrinoResponseCacheDao() { 11 | super(TrinoResponse.class); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/common/AbstractDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.dao.common; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | import com.google.common.base.Preconditions; 5 | 6 | import java.io.Serializable; 7 | 8 | public abstract class AbstractDao implements Operations { 9 | 10 | protected Class clazz; 11 | 12 | protected final void setClazz(final Class clazzToSet) { 13 | clazz = Preconditions.checkNotNull(clazzToSet); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/java/com/cloudcheflabs/dataroaster/operators/dataroaster/component/DBSchemaCreatorTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.component; 2 | 3 | import org.junit.Test; 4 | 5 | import java.util.Arrays; 6 | 7 | public class DBSchemaCreatorTestRunner { 8 | 9 | @Test 10 | public void createSchema() throws Exception { 11 | // system property -DdataroasterKubeconfig=... must be set before running test. 
12 | DBSchemaCreator.main(Arrays.asList("root", "mysqlpass123", "/opt/dataroaster-operator/create-tables.sql").toArray(new String[0])); 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/dao/ScaleWorkerDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.dao; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.domain.RestResponse; 4 | 5 | public interface ScaleWorkerDao { 6 | 7 | RestResponse listWorkerCount(String restUri, String namespace); 8 | RestResponse scaleOutWorkers(String restUri, String namespace, String name, int replicas); 9 | RestResponse listHpa(String restUri, String namespace); 10 | RestResponse updateHpa(String restUri, String namespace, String name, int minReplicas, int maxReplicas); 11 | } 12 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/redis/RedisTrinoActiveQueryCountCacheDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.dao.redis; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.dao.common.AbstractCacheDao; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.TrinoActiveQueryCount; 5 | import org.springframework.stereotype.Repository; 6 | 7 | @Repository 8 | public class RedisTrinoActiveQueryCountCacheDao extends AbstractCacheDao { 9 | 10 | public RedisTrinoActiveQueryCountCacheDao() { 11 | super(TrinoActiveQueryCount.class); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- 
/operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/dao/common/AbstractDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.dao.common; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 5 | import com.google.common.base.Preconditions; 6 | 7 | import java.io.Serializable; 8 | 9 | public abstract class AbstractDao implements Operations { 10 | 11 | protected Class clazz; 12 | 13 | protected final void setClazz(final Class clazzToSet) { 14 | clazz = Preconditions.checkNotNull(clazzToSet); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/service/ScaleWorkerService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.domain.RestResponse; 4 | 5 | public interface ScaleWorkerService { 6 | RestResponse listWorkerCount(String restUri, String namespace); 7 | RestResponse scaleOutWorkers(String restUri, String namespace, String name, int replicas); 8 | RestResponse listHpa(String restUri, String namespace); 9 | RestResponse updateHpa(String restUri, String namespace, String name, int minReplicas, int maxReplicas); 10 | } 11 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/dao/common/AbstractRestDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.dao.common; 2 | 3 | import 
com.cloudcheflabs.dataroaster.trino.controller.component.SimpleHttpClient; 4 | import okhttp3.MediaType; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.stereotype.Repository; 7 | 8 | @Repository 9 | public abstract class AbstractRestDao { 10 | 11 | @Autowired 12 | protected SimpleHttpClient simpleHttpClient; 13 | 14 | protected MediaType mediaType = MediaType.parse("application/x-www-form-urlencoded"); 15 | } 16 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/common/GenericHibernateDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.dao.common; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.GenericDao; 4 | import org.springframework.beans.factory.config.BeanDefinition; 5 | import org.springframework.context.annotation.Scope; 6 | import org.springframework.stereotype.Repository; 7 | 8 | import java.io.Serializable; 9 | 10 | @Repository 11 | @Scope(BeanDefinition.SCOPE_PROTOTYPE) 12 | public class GenericHibernateDao extends AbstractHibernateDao implements GenericDao { 13 | // 14 | } -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/docker/create-db-schema.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | USER=root 6 | PASSWORD=anypass 7 | SQL_PATH=/any/sqlpath 8 | 9 | while getopts "U:P:S:" flag 10 | do 11 | case "${flag}" in 12 | U) USER=${OPTARG};; 13 | P) PASSWORD=${OPTARG};; 14 | S) SQL_PATH=${OPTARG};; 15 | esac 16 | done 17 | 18 | echo "USER: $USER"; 19 | echo "PASSWORD: $PASSWORD"; 20 | echo "SQL_PATH: $SQL_PATH"; 21 | 22 | 23 | # create db schema. 
24 | java \ 25 | -cp trino-gateway-*.jar \ 26 | -Dtrino.gateway.createDBSchema=true \ 27 | org.springframework.boot.loader.PropertiesLauncher ${USER} ${PASSWORD} ${SQL_PATH} 28 | -------------------------------------------------------------------------------- /components/dbt/docker/build-docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | set -e -x 5 | 6 | export DBT_IMAGE=cloudcheflabs/dbt:0.21.0 7 | 8 | for i in "$@" 9 | do 10 | case $i in 11 | --image=*) 12 | DBT_IMAGE="${i#*=}" 13 | shift 14 | ;; 15 | *) 16 | # unknown option 17 | ;; 18 | esac 19 | done 20 | 21 | echo "DBT_IMAGE = ${DBT_IMAGE}" 22 | 23 | 24 | set +e -x 25 | 26 | # build docker. 27 | ## remove docker image. 28 | docker rmi -f $(docker images -a | grep dbt | awk '{print $3}') 29 | 30 | set -e -x 31 | 32 | cd ../; 33 | 34 | ## build. 35 | docker build \ 36 | -t ${DBT_IMAGE} \ 37 | ./docker; 38 | 39 | 40 | # push docker image. 41 | docker push ${DBT_IMAGE}; 42 | -------------------------------------------------------------------------------- /components/jupyterhub/cr/helm-jupyterhub.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: jupyterhub 5 | namespace: dataroaster-operator 6 | spec: 7 | repo: https://charts.bitnami.com/bitnami 8 | chartName: jupyterhub 9 | name: jupyterhub 10 | version: 1.3.6 11 | namespace: jupyterhub 12 | values: | 13 | hub: 14 | service: 15 | type: LoadBalancer 16 | singleuser: 17 | image: 18 | registry: docker.io 19 | repository: cloudcheflabs/dataroaster-bitnami-jupyter 20 | tag: 1.5.0 21 | pullPolicy: Always 22 | persistence: 23 | storageClass: oci 24 | size: 15Gi -------------------------------------------------------------------------------- /components/livy/docker/build-docker.sh: -------------------------------------------------------------------------------- 1 | 
#!/bin/bash 2 | 3 | 4 | set -e -x 5 | 6 | export LIVY_IMAGE=cloudcheflabs/livy:0.7.1 7 | 8 | for i in "$@" 9 | do 10 | case $i in 11 | --image=*) 12 | LIVY_IMAGE="${i#*=}" 13 | shift 14 | ;; 15 | *) 16 | # unknown option 17 | ;; 18 | esac 19 | done 20 | 21 | echo "LIVY_IMAGE = ${LIVY_IMAGE}" 22 | 23 | 24 | set +e -x 25 | 26 | # build docker. 27 | ## remove dcker image. 28 | docker rmi -f $(docker images -a | grep livy | awk '{print $3}') 29 | 30 | set -e -x 31 | 32 | cd ../; 33 | 34 | ## build. 35 | docker build \ 36 | -t ${LIVY_IMAGE} \ 37 | ./docker; 38 | 39 | 40 | # push docker image. 41 | docker push ${LIVY_IMAGE}; 42 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/docker/create-db-schema.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eux; 4 | 5 | USER=root 6 | PASSWORD=anypass 7 | SQL_PATH=/any/sqlpath 8 | 9 | while getopts "U:P:S:" flag 10 | do 11 | case "${flag}" in 12 | U) USER=${OPTARG};; 13 | P) PASSWORD=${OPTARG};; 14 | S) SQL_PATH=${OPTARG};; 15 | esac 16 | done 17 | 18 | echo "USER: $USER"; 19 | echo "PASSWORD: $PASSWORD"; 20 | echo "SQL_PATH: $SQL_PATH"; 21 | 22 | 23 | # create db schema. 
24 | java \ 25 | -cp dataroaster-operator-*.jar \ 26 | -Ddataroaster.createDBSchema=true \ 27 | org.springframework.boot.loader.PropertiesLauncher ${USER} ${PASSWORD} ${SQL_PATH} 28 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/util/BCryptUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.util; 2 | 3 | import at.favre.lib.crypto.bcrypt.BCrypt; 4 | 5 | public class BCryptUtils { 6 | private static final int DEFAULT_COST = 8; 7 | 8 | public static String encodeWithBCrypt(String str) { 9 | return BCrypt.withDefaults().hashToString(DEFAULT_COST, str.toCharArray()); 10 | } 11 | 12 | public static boolean isMatched(String str, String bcryptEncodedStr) { 13 | BCrypt.Result result = BCrypt.verifyer().verify(str.getBytes(), bcryptEncodedStr.getBytes()); 14 | return result.verified; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/templates/cr/prometheus.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: prometheus 5 | namespace: {{ customResourceNamespace }} 6 | spec: 7 | repo: https://prometheus-community.github.io/helm-charts 8 | chartName: prometheus 9 | name: prometheus 10 | version: 15.10.2 11 | namespace: prometheus 12 | values: | 13 | alertmanager: 14 | persistentVolume: 15 | storageClass: {{ storageClass }} 16 | server: 17 | persistentVolume: 18 | storageClass: {{ storageClass }} 19 | pushgateway: 20 | persistentVolume: 21 | storageClass: {{ storageClass }} 22 | -------------------------------------------------------------------------------- 
/operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/dao/common/GenericHibernateDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.dao.common; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.GenericDao; 4 | import org.springframework.beans.factory.config.BeanDefinition; 5 | import org.springframework.context.annotation.Scope; 6 | import org.springframework.stereotype.Repository; 7 | 8 | import java.io.Serializable; 9 | 10 | @Repository 11 | @Scope(BeanDefinition.SCOPE_PROTOTYPE) 12 | public class GenericHibernateDao extends AbstractHibernateDao implements GenericDao { 13 | // 14 | } -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/RandomUtilsTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import org.junit.Test; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | public class RandomUtilsTestRunner { 8 | 9 | private static Logger LOG = LoggerFactory.getLogger(RandomUtilsTestRunner.class); 10 | 11 | @Test 12 | public void random() throws Exception { 13 | 14 | String random = RandomUtils.randomPassword(); 15 | LOG.info("random: [{}]", random); 16 | 17 | LOG.info("bcrypted: [{}]", BCryptUtils.encodeWithBCrypt(random)); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/crd/HelmChart.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.crd; 2 | 3 | import 
io.fabric8.kubernetes.api.model.Namespaced; 4 | import io.fabric8.kubernetes.client.CustomResource; 5 | import io.fabric8.kubernetes.model.annotation.Group; 6 | import io.fabric8.kubernetes.model.annotation.Version; 7 | 8 | @Version(HelmChart.VERSION) 9 | @Group(HelmChart.GROUP) 10 | public class HelmChart extends CustomResource implements Namespaced { 11 | 12 | public static final String GROUP = "helm-operator.cloudchef-labs.com"; 13 | public static final String VERSION = "v1beta1"; 14 | 15 | 16 | 17 | } 18 | -------------------------------------------------------------------------------- /operators/dataroaster/chart/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: dataroaster-operator 3 | version: v3.0.8 4 | appVersion: 4.3.0 5 | description: dataroaster operator 6 | 7 | dependencies: 8 | - name: dataroastermysql 9 | version: v1.0.2 10 | repository: https://cloudcheflabs.github.io/mysql-helm-repo/ 11 | - name: dataroasterhelmoperator 12 | version: v1.1.1 13 | repository: https://cloudcheflabs.github.io/helm-operator-helm-repo/ 14 | 15 | - name: dataroastertrinooperator 16 | version: v2.1.5 17 | repository: https://cloudcheflabs.github.io/trino-helm-repo/ 18 | 19 | - name: dataroastersparkoperator 20 | version: v1.0.1 21 | repository: https://cloudcheflabs.github.io/helm-repository/ -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/RandomUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import java.nio.charset.Charset; 4 | import java.util.Random; 5 | import java.util.UUID; 6 | 7 | public class RandomUtils { 8 | 9 | public static String randomText() { 10 | byte[] array = new byte[20]; 11 | new Random().nextBytes(array); 12 | 
return new String(array, Charset.forName("UTF-8")); 13 | } 14 | 15 | public static String randomPassword() { 16 | UUID randomUUID = UUID.randomUUID(); 17 | 18 | return randomUUID.toString().replaceAll("-", ""); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/BCryptUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import at.favre.lib.crypto.bcrypt.BCrypt; 4 | 5 | public class BCryptUtils { 6 | private static final int DEFAULT_COST = 8; 7 | 8 | public static String encodeWithBCrypt(String str) { 9 | return BCrypt.withDefaults().hashToString(DEFAULT_COST, str.toCharArray()); 10 | } 11 | 12 | public static boolean isMatched(String str, String bcryptEncodedStr) { 13 | BCrypt.Result result = BCrypt.verifyer().verify(str.getBytes(), bcryptEncodedStr.getBytes()); 14 | return result.verified; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/crd/SparkApplication.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.crd; 2 | 3 | 4 | import io.fabric8.kubernetes.api.model.Namespaced; 5 | import io.fabric8.kubernetes.client.CustomResource; 6 | import io.fabric8.kubernetes.model.annotation.Group; 7 | import io.fabric8.kubernetes.model.annotation.Version; 8 | 9 | @Version(SparkApplication.VERSION) 10 | @Group(SparkApplication.GROUP) 11 | public class SparkApplication extends CustomResource implements Namespaced { 12 | public static final String GROUP = "spark-operator.cloudchef-labs.com"; 13 | public static final String VERSION = "v1alpha1"; 14 | } 15 | 
-------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/crd/TrinoCluster.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.crd; 2 | 3 | import io.fabric8.kubernetes.api.model.Namespaced; 4 | import io.fabric8.kubernetes.client.CustomResource; 5 | import io.fabric8.kubernetes.model.annotation.Group; 6 | import io.fabric8.kubernetes.model.annotation.Version; 7 | 8 | @Version(TrinoCluster.VERSION) 9 | @Group(TrinoCluster.GROUP) 10 | public class TrinoCluster extends CustomResource implements Namespaced { 11 | 12 | public static final String GROUP = "trino-operator.cloudchef-labs.com"; 13 | public static final String VERSION = "v1beta1"; 14 | 15 | 16 | 17 | } 18 | -------------------------------------------------------------------------------- /components/metabase/docker/build-docker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | set -e -x 5 | 6 | export METABASE_IMAGE=cloudcheflabs/metabase:v0.44.3 7 | 8 | for i in "$@" 9 | do 10 | case $i in 11 | --image=*) 12 | METABASE_IMAGE="${i#*=}" 13 | shift 14 | ;; 15 | *) 16 | # unknown option 17 | ;; 18 | esac 19 | done 20 | 21 | echo "METABASE_IMAGE = ${METABASE_IMAGE}" 22 | 23 | 24 | set +e -x 25 | 26 | # build docker. 27 | ## remove dcker image. 28 | docker rmi -f $(docker images -a | grep metabase | awk '{print $3}') 29 | 30 | set -e -x 31 | 32 | cd ../; 33 | 34 | ## build. 35 | docker build \ 36 | -t ${METABASE_IMAGE} \ 37 | ./docker; 38 | 39 | 40 | # push docker image. 
41 | docker push ${METABASE_IMAGE}; 42 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/TokenUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import java.util.Base64; 4 | 5 | public class TokenUtils { 6 | 7 | /** 8 | * this token will be created for client to send requests with authorization header. 9 | * 10 | * @return 11 | */ 12 | public static String newToken() { 13 | String generatedString = RandomUtils.randomText(); 14 | String bcrypted = BCryptUtils.encodeWithBCrypt(generatedString); 15 | String encodedString = Base64.getEncoder().encodeToString(bcrypted.getBytes()); 16 | 17 | return encodedString; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/config/HttpClientConfigurer.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.config; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.component.SimpleHttpClient; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | 9 | @Configuration 10 | public class HttpClientConfigurer { 11 | 12 | private static Logger LOG = LoggerFactory.getLogger(HttpClientConfigurer.class); 13 | 14 | 15 | @Bean 16 | public SimpleHttpClient simpleHttpClient() { 17 | return new SimpleHttpClient(); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /components/dbt/docker/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM k8s.gcr.io/git-sync/git-sync:v3.6.1 2 | 3 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8' 4 | 5 | ENTRYPOINT [] 6 | 7 | USER root 8 | 9 | ENV DEBIAN_FRONTEND noninteractive 10 | 11 | RUN set -eux; \ 12 | apt-get update; \ 13 | apt-get install -y curl --no-install-recommends; \ 14 | apt install python3-pip -y; \ 15 | pip3 install --upgrade pip; \ 16 | pip3 install dbt-core==1.2.1; \ 17 | pip3 install dbt-trino==1.2.2; 18 | 19 | # print dbt version. 20 | RUN dbt --version 21 | 22 | # print git sync version. 23 | RUN /git-sync --version 24 | 25 | # set time zone. 26 | ENV TZ="Asia/Seoul" 27 | 28 | # print date. 29 | RUN echo "current date: $(date)" 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /components/livy/chart/templates/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: livy 5 | namespace: {{ .Values.namespace }} 6 | labels: 7 | app: livy 8 | component: dataroaster 9 | data: 10 | livy.conf: | 11 | livy.spark.master = {{ .Values.livy.master }} 12 | livy.spark.deployMode = {{ .Values.livy.deploy }} 13 | log4j.properties: | 14 | log4j.rootCategory=DEBUG, console 15 | log4j.appender.console=org.apache.log4j.ConsoleAppender 16 | log4j.appender.console.target=System.err 17 | log4j.appender.console.layout=org.apache.log4j.PatternLayout 18 | log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 19 | log4j.logger.org.eclipse.jetty=WARN 20 | 21 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/config/HttpClientConfigurer.java: -------------------------------------------------------------------------------- 1 | package 
com.cloudcheflabs.dataroaster.trino.controller.config; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.component.SimpleHttpClient; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | 9 | @Configuration 10 | public class HttpClientConfigurer { 11 | 12 | private static Logger LOG = LoggerFactory.getLogger(HttpClientConfigurer.class); 13 | 14 | 15 | @Bean 16 | public SimpleHttpClient simpleHttpClient() { 17 | return new SimpleHttpClient(); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/handler/HelmChartActionEvent.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.handler; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.helm.crd.HelmChart; 4 | import io.fabric8.kubernetes.client.Watcher; 5 | 6 | public class HelmChartActionEvent { 7 | private Watcher.Action action; 8 | private HelmChart helmChart; 9 | 10 | public HelmChartActionEvent(Watcher.Action action, HelmChart helmChart) { 11 | this.action = action; 12 | this.helmChart = helmChart; 13 | } 14 | 15 | public Watcher.Action getAction() { 16 | return action; 17 | } 18 | 19 | public HelmChart getHelmChart() { 20 | return helmChart; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/crd/Config.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.crd; 2 | 3 | public class Config { 4 | private String name; 5 | private String path; 6 | private String value; 7 | 8 | public String getName() { 9 | return name; 
10 | } 11 | 12 | public void setName(String name) { 13 | this.name = name; 14 | } 15 | 16 | public String getPath() { 17 | return path; 18 | } 19 | 20 | public void setPath(String path) { 21 | this.path = path; 22 | } 23 | 24 | public String getValue() { 25 | return value; 26 | } 27 | 28 | public void setValue(String value) { 29 | this.value = value; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/BasicAuthentication.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | public class BasicAuthentication { 4 | private String user; 5 | private String password; 6 | 7 | public BasicAuthentication(String user, String password) { 8 | this.user = user; 9 | this.password = password; 10 | } 11 | 12 | public String getUser() { 13 | return user; 14 | } 15 | 16 | public void setUser(String user) { 17 | this.user = user; 18 | } 19 | 20 | public String getPassword() { 21 | return password; 22 | } 23 | 24 | public void setPassword(String password) { 25 | this.password = password; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/config/HelmConfig.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.config; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.helm.handler.HelmChartClient; 5 | import io.fabric8.kubernetes.client.DefaultKubernetesClient; 6 | import io.fabric8.kubernetes.client.KubernetesClient; 7 | import org.springframework.context.annotation.Bean; 8 | import org.springframework.context.annotation.Configuration; 9 | 10 | @Configuration 11 | public class HelmConfig { 12 | 13 | @Bean 14 | 
public HelmChartClient helmChartClient() { return new HelmChartClient(kubernetesClient()); } 15 | 16 | @Bean 17 | public KubernetesClient kubernetesClient() { return new DefaultKubernetesClient(); } 18 | 19 | } 20 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/domain/BasicAuthentication.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.domain; 2 | 3 | public class BasicAuthentication { 4 | private String user; 5 | private String password; 6 | 7 | public BasicAuthentication(String user, String password) { 8 | this.user = user; 9 | this.password = password; 10 | } 11 | 12 | public String getUser() { 13 | return user; 14 | } 15 | 16 | public void setUser(String user) { 17 | this.user = user; 18 | } 19 | 20 | public String getPassword() { 21 | return password; 22 | } 23 | 24 | public void setPassword(String password) { 25 | this.password = password; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /components/livy/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: livy 2 | serviceAccount: livy 3 | image: cloudcheflabs/livy:0.7.1 4 | imagePullPolicy: Always 5 | replicas: 1 6 | resources: 7 | requests: 8 | cpu: 200m 9 | memory: 1Gi 10 | limits: 11 | cpu: 1000m 12 | memory: 5Gi 13 | priorityClassName: "" 14 | annotations: null 15 | affinity: {} 16 | tolerations: {} 17 | nodeSelector: {} 18 | hostNetwork: false 19 | 20 | server: 21 | port: 8998 22 | 23 | 24 | # readiness probe. 25 | readiness: 26 | initialDelaySeconds: 15 27 | periodSeconds: 10 28 | 29 | # liveness probe. 30 | liveness: 31 | initialDelaySeconds: 3 32 | periodSeconds: 10 33 | 34 | # livy. 
35 | livy: 36 | master: k8s://https://kubernetes.default.svc:443 37 | deploy: cluster 38 | storageClass: nfs-external 39 | storageSize: 2Gi -------------------------------------------------------------------------------- /operators/dataroaster/chart/values.yaml: -------------------------------------------------------------------------------- 1 | serviceAccount: dataroaster-operator 2 | image: cloudcheflabs/dataroaster-operator:4.3.0 3 | imagePullPolicy: Always 4 | replicas: 1 5 | resources: 6 | requests: 7 | cpu: 200m 8 | memory: 1Gi 9 | limits: 10 | cpu: 300m 11 | memory: 1500Mi 12 | priorityClassName: "" 13 | annotations: null 14 | affinity: null 15 | tolerations: null 16 | nodeSelector: null 17 | hostNetwork: false 18 | 19 | # spring boot server 20 | server: 21 | port: 8089 22 | 23 | # jdbc 24 | jdbc: 25 | user: admin 26 | pass: Admin123! 27 | 28 | # hibernate 29 | hibernate: 30 | show_sql: false 31 | globally_quoted_identifiers: true 32 | 33 | # dependency 34 | dependency: 35 | trino: 36 | namespace: trino-operator 37 | spark: 38 | namespace: spark-operator 39 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/domain/BasicAuthentication.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.domain; 2 | 3 | public class BasicAuthentication { 4 | private String user; 5 | private String password; 6 | 7 | public BasicAuthentication(String user, String password) { 8 | this.user = user; 9 | this.password = password; 10 | } 11 | 12 | public String getUser() { 13 | return user; 14 | } 15 | 16 | public void setUser(String user) { 17 | this.user = user; 18 | } 19 | 20 | public String getPassword() { 21 | return password; 22 | } 23 | 24 | public void setPassword(String password) { 25 | this.password = password; 26 | } 27 | } 28 | 
-------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/test/java/com/cloudcheflabs/dataroaster/trino/controller/util/TemplateUtilsTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.util; 2 | 3 | import com.cloudcheflabs.dataroaster.common.util.TemplateUtils; 4 | import org.junit.Test; 5 | 6 | import java.util.HashMap; 7 | import java.util.Map; 8 | 9 | public class TemplateUtilsTestRunner { 10 | 11 | @Test 12 | public void replace() throws Exception{ 13 | Map kv = new HashMap<>(); 14 | kv.put("customResourceNamespace", "trino-controller"); 15 | String nginxCrString = 16 | TemplateUtils.replace("/templates/cr/nginx-ingress-controller.yaml", true, kv); 17 | System.out.printf("nginxCrString: \n%s", nginxCrString); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/hibernate/HibernateUsersDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.dao.hibernate; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.UsersDao; 5 | import com.cloudcheflabs.dataroaster.trino.gateway.dao.common.AbstractHibernateDao; 6 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Users; 7 | import org.springframework.stereotype.Repository; 8 | import org.springframework.transaction.annotation.Transactional; 9 | 10 | @Repository 11 | @Transactional 12 | public class HibernateUsersDao extends AbstractHibernateDao implements UsersDao { 13 | 14 | public HibernateUsersDao() { 15 | super(); 16 | setClazz(Users.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- 
/trino-ecosystem/trino-gateway/src/test/resources/application-test.properties: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server.port=8099 3 | server.servlet.context-path=/ 4 | 5 | # trino proxy. 6 | trino.proxy.port=18080 7 | trino.proxy.authentication=true 8 | trino.proxy.tls.enabled=true 9 | trino.proxy.tls.keystorePath=/home/opc/keystore.jks 10 | trino.proxy.tls.keystorePass=changeit 11 | trino.proxy.tls.trustStorePath=/home/opc/.keystore 12 | trino.proxy.tls.trustStorePass=changeit 13 | 14 | # jdbc 15 | jdbc.driverClassName=com.mysql.jdbc.Driver 16 | jdbc.url=jdbc:mysql://localhost:3306/trino_proxy?useSSL=false 17 | jdbc.user=trino 18 | jdbc.pass=Trino123! 19 | 20 | # hibernate 21 | hibernate.dialect=org.hibernate.dialect.MySQL5Dialect 22 | hibernate.show_sql=true 23 | hibernate.globally_quoted_identifiers=true 24 | 25 | 26 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/hibernate/HibernateClusterDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.dao.hibernate; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.ClusterDao; 5 | import com.cloudcheflabs.dataroaster.trino.gateway.dao.common.AbstractHibernateDao; 6 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Cluster; 7 | import org.springframework.stereotype.Repository; 8 | import org.springframework.transaction.annotation.Transactional; 9 | 10 | @Repository 11 | @Transactional 12 | public class HibernateClusterDao extends AbstractHibernateDao implements ClusterDao { 13 | 14 | public HibernateClusterDao() { 15 | super(); 16 | setClazz(Cluster.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- 
/operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/domain/BasicAuthentication.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.domain; 2 | 3 | public class BasicAuthentication { 4 | private String user; 5 | private String password; 6 | 7 | public BasicAuthentication(String user, String password) { 8 | this.user = user; 9 | this.password = password; 10 | } 11 | 12 | public String getUser() { 13 | return user; 14 | } 15 | 16 | public void setUser(String user) { 17 | this.user = user; 18 | } 19 | 20 | public String getPassword() { 21 | return password; 22 | } 23 | 24 | public void setPassword(String password) { 25 | this.password = password; 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/handler/TrinoClusterActionEvent.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.handler; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.trino.crd.TrinoCluster; 4 | import io.fabric8.kubernetes.client.Watcher; 5 | 6 | public class TrinoClusterActionEvent { 7 | private Watcher.Action action; 8 | private TrinoCluster trinoCluster; 9 | 10 | public TrinoClusterActionEvent(Watcher.Action action, TrinoCluster trinoCluster) { 11 | this.action = action; 12 | this.trinoCluster = trinoCluster; 13 | } 14 | 15 | public Watcher.Action getAction() { 16 | return action; 17 | } 18 | 19 | public TrinoCluster getTrinoCluster() { 20 | return trinoCluster; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/api/dao/K8sResourceDao.java: 
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.api.dao; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.trino.domain.CustomResource; 5 | import io.fabric8.kubernetes.api.model.GenericKubernetesResource; 6 | 7 | import java.util.List; 8 | 9 | public interface K8sResourceDao { 10 | 11 | void createCustomResource(CustomResource customResource); 12 | 13 | void deleteCustomResource(String name, String namespace, String kind); 14 | 15 | void updateCustomResource(CustomResource customResource); 16 | 17 | void updateCustomResource(GenericKubernetesResource genericKubernetesResource); 18 | 19 | List listCustomResources(String namespace, String kind); 20 | } 21 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/dao/K8sResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.dao; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.trino.controller.domain.CustomResource; 5 | import io.fabric8.kubernetes.api.model.GenericKubernetesResource; 6 | 7 | import java.util.List; 8 | 9 | public interface K8sResourceDao { 10 | 11 | void createCustomResource(CustomResource customResource); 12 | 13 | void deleteCustomResource(String name, String namespace, String kind); 14 | 15 | void updateCustomResource(CustomResource customResource); 16 | 17 | void updateCustomResource(GenericKubernetesResource genericKubernetesResource); 18 | 19 | List listCustomResources(String namespace, String kind); 20 | } 21 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/service/K8sResourceService.java: 
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.domain.CustomResource; 4 | import io.fabric8.kubernetes.api.model.GenericKubernetesResource; 5 | 6 | import java.util.List; 7 | 8 | public interface K8sResourceService { 9 | void createCustomResource(CustomResource customResource); 10 | 11 | void deleteCustomResource(String name, String namespace, String kind); 12 | 13 | void updateCustomResource(CustomResource customResource); 14 | 15 | void updateCustomResource(GenericKubernetesResource genericKubernetesResource); 16 | 17 | List listCustomResources(String namespace, String kind); 18 | } 19 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/dao/hibernate/HibernateUsersDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.dao.hibernate; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.UsersDao; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.dao.common.AbstractHibernateDao; 6 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.Users; 7 | import org.springframework.stereotype.Repository; 8 | import org.springframework.transaction.annotation.Transactional; 9 | 10 | @Repository 11 | @Transactional 12 | public class HibernateUsersDao extends AbstractHibernateDao implements UsersDao { 13 | 14 | public HibernateUsersDao() { 15 | super(); 16 | setClazz(Users.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/crd/Resources.java: 
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.crd; 2 | 3 | public class Resources { 4 | private String cores; 5 | private String limitCores; 6 | private String memory; 7 | 8 | public String getCores() { 9 | return cores; 10 | } 11 | 12 | public void setCores(String cores) { 13 | this.cores = cores; 14 | } 15 | 16 | public String getLimitCores() { 17 | return limitCores; 18 | } 19 | 20 | public void setLimitCores(String limitCores) { 21 | this.limitCores = limitCores; 22 | } 23 | 24 | public String getMemory() { 25 | return memory; 26 | } 27 | 28 | public void setMemory(String memory) { 29 | this.memory = memory; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/resources/templates/cr/trino-gateway.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "helm-operator.cloudchef-labs.com/v1beta1" 2 | kind: HelmChart 3 | metadata: 4 | name: trino-gateway 5 | namespace: {{ customResourceNamespace }} 6 | spec: 7 | repo: https://cloudcheflabs.github.io/trino-gateway-helm-repo/ 8 | chartName: dataroaster-trino-gateway 9 | name: trino-gateway 10 | version: v1.7.1 11 | namespace: trino-gateway 12 | values: | 13 | ingress: 14 | proxyHostName: {{ proxyHostName }} 15 | restHostName: {{ restHostName }} 16 | trino: 17 | proxy: 18 | publicEndpoint: {{ publicEndpoint }} 19 | dataroastermysql: 20 | storage: 21 | storageClass: {{ storageClass }} 22 | redis: 23 | global: 24 | storageClass: {{ storageClass }} 25 | replica: 26 | replicaCount: 1 -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/dao/hibernate/HibernateClusterGroupDao.java: -------------------------------------------------------------------------------- 1 | package 
com.cloudcheflabs.dataroaster.trino.gateway.dao.hibernate; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.ClusterGroupDao; 5 | import com.cloudcheflabs.dataroaster.trino.gateway.dao.common.AbstractHibernateDao; 6 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.ClusterGroup; 7 | import org.springframework.stereotype.Repository; 8 | import org.springframework.transaction.annotation.Transactional; 9 | 10 | @Repository 11 | @Transactional 12 | public class HibernateClusterGroupDao extends AbstractHibernateDao implements ClusterGroupDao { 13 | 14 | public HibernateClusterGroupDao() { 15 | super(); 16 | setClazz(ClusterGroup.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/test/resources/pyspark/pi.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import sys 4 | from random import random 5 | from operator import add 6 | 7 | from pyspark.sql import SparkSession 8 | 9 | 10 | if __name__ == "__main__": 11 | """ 12 | Usage: pi [partitions] 13 | """ 14 | spark = SparkSession\ 15 | .builder\ 16 | .appName("PythonPi")\ 17 | .getOrCreate() 18 | 19 | partitions = int(sys.argv[1]) if len(sys.argv) > 1 else 2 20 | n = 100000 * partitions 21 | 22 | def f(_): 23 | x = random() * 2 - 1 24 | y = random() * 2 - 1 25 | return 1 if x ** 2 + y ** 2 <= 1 else 0 26 | 27 | count = spark.sparkContext.parallelize(range(1, n + 1), partitions).map(f).reduce(add) 28 | print("Pi is roughly %f" % (4.0 * count / n)) 29 | 30 | spark.stop() -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/chart/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: trino-gateway-proxy-service 5 | namespace: {{ .Values.namespace }} 
6 | labels: 7 | app: trino-gateway 8 | spec: 9 | type: ClusterIP 10 | ports: 11 | - port: {{ .Values.trino.proxy.port }} 12 | targetPort: proxy 13 | protocol: TCP 14 | name: trino-gateway-proxy 15 | selector: 16 | app: trino-gateway 17 | 18 | --- 19 | apiVersion: v1 20 | kind: Service 21 | metadata: 22 | name: trino-gateway-rest-service 23 | namespace: {{ .Values.namespace }} 24 | labels: 25 | app: trino-gateway 26 | spec: 27 | type: ClusterIP 28 | ports: 29 | - port: {{ .Values.server.port }} 30 | targetPort: rest 31 | protocol: TCP 32 | name: trino-gateway-rest 33 | selector: 34 | app: trino-gateway 35 | -------------------------------------------------------------------------------- /components/redash/chart/templates/ingress.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ingress.enabled -}} 2 | apiVersion: networking.k8s.io/v1 3 | kind: Ingress 4 | metadata: 5 | name: redash-ingress 6 | namespace: {{ .Values.namespace }} 7 | annotations: 8 | cert-manager.io/cluster-issuer: {{ .Values.certManager.clusterIssue }} 9 | spec: 10 | ingressClassName: {{ .Values.ingress.ingressClassName }} 11 | rules: 12 | - host: {{ .Values.ingress.hostName }} 13 | http: 14 | paths: 15 | - backend: 16 | service: 17 | name: redash 18 | port: 19 | number: {{ .Values.server.port }} 20 | path: / 21 | pathType: ImplementationSpecific 22 | tls: 23 | - hosts: 24 | - {{ .Values.ingress.hostName }} 25 | secretName: {{ .Values.ingress.hostName }}-tls 26 | {{- end }} -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/IdUtilsTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | import org.slf4j.Logger; 6 | import 
org.slf4j.LoggerFactory; 7 | 8 | import java.util.Arrays; 9 | import java.util.List; 10 | 11 | public class IdUtilsTestRunner { 12 | 13 | private static Logger LOG = LoggerFactory.getLogger(IdUtilsTestRunner.class); 14 | 15 | @Test 16 | public void newId() throws Exception { 17 | List stringList = Arrays.asList("HelmChart", "mysql", "trino-operator"); 18 | String newId = IdUtils.newId(stringList); 19 | LOG.info("newId: [{}]", newId); 20 | 21 | Assert.assertTrue(IdUtils.isMatched(stringList, newId)); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/handler/SparkApplicationActionEvent.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.handler; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.spark.crd.SparkApplication; 4 | import io.fabric8.kubernetes.client.Watcher; 5 | 6 | public class SparkApplicationActionEvent { 7 | private Watcher.Action action; 8 | private SparkApplication sparkApplication; 9 | 10 | public SparkApplicationActionEvent(Watcher.Action action, SparkApplication sparkApplication) { 11 | this.action = action; 12 | this.sparkApplication = sparkApplication; 13 | } 14 | 15 | public Watcher.Action getAction() { 16 | return action; 17 | } 18 | 19 | public SparkApplication getSparkApplication() { 20 | return sparkApplication; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/chart/templates/rest-ingress.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.k8s.io/v1 2 | kind: Ingress 3 | metadata: 4 | name: trino-gateway-rest-ingress 5 | namespace: {{ .Values.namespace }} 6 | annotations: 7 | cert-manager.io/cluster-issuer: {{ .Values.certManager.clusterIssue }} 8 | spec: 9 | 
ingressClassName: {{ .Values.ingress.ingressClassName }} 10 | rules: 11 | - host: {{ .Values.ingress.restHostName }} 12 | http: 13 | paths: 14 | - backend: 15 | service: 16 | name: trino-gateway-rest-service 17 | port: 18 | number: {{ .Values.server.port }} 19 | path: / 20 | pathType: ImplementationSpecific 21 | tls: 22 | - hosts: 23 | - {{ .Values.ingress.restHostName }} 24 | secretName: {{ .Values.ingress.restHostName }}-tls -------------------------------------------------------------------------------- /components/metabase/chart/templates/ingress.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ingress.enabled -}} 2 | apiVersion: networking.k8s.io/v1 3 | kind: Ingress 4 | metadata: 5 | name: metabase-ingress 6 | namespace: {{ .Values.namespace }} 7 | annotations: 8 | cert-manager.io/cluster-issuer: {{ .Values.certManager.clusterIssue }} 9 | spec: 10 | ingressClassName: {{ .Values.ingress.ingressClassName }} 11 | rules: 12 | - host: {{ .Values.ingress.hostName }} 13 | http: 14 | paths: 15 | - backend: 16 | service: 17 | name: metabase-service 18 | port: 19 | number: {{ .Values.server.port }} 20 | path: / 21 | pathType: ImplementationSpecific 22 | tls: 23 | - hosts: 24 | - {{ .Values.ingress.hostName }} 25 | secretName: {{ .Values.ingress.hostName }}-tls 26 | {{- end }} -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/test/java/com/cloudcheflabs/dataroaster/trino/gateway/TrinoProxyTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway; 2 | 3 | import org.junit.BeforeClass; 4 | import org.junit.Test; 5 | import org.springframework.boot.SpringApplication; 6 | import org.springframework.context.ConfigurableApplicationContext; 7 | 8 | import java.util.Arrays; 9 | 10 | public class TrinoProxyTestRunner { 11 | 12 | @BeforeClass 13 | public static 
void setup() throws Exception { 14 | // run spring boot application. 15 | ConfigurableApplicationContext applicationContext = 16 | SpringApplication.run(TrinoGatewayApplication.class, Arrays.asList("").toArray(new String[0])); 17 | } 18 | 19 | 20 | @Test 21 | public void run() throws Exception { 22 | Thread.sleep(Long.MAX_VALUE); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/chart/templates/proxy-ingress.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.k8s.io/v1 2 | kind: Ingress 3 | metadata: 4 | name: trino-gateway-proxy-ingress 5 | namespace: {{ .Values.namespace }} 6 | annotations: 7 | cert-manager.io/cluster-issuer: {{ .Values.certManager.clusterIssue }} 8 | spec: 9 | ingressClassName: {{ .Values.ingress.ingressClassName }} 10 | rules: 11 | - host: {{ .Values.ingress.proxyHostName }} 12 | http: 13 | paths: 14 | - backend: 15 | service: 16 | name: trino-gateway-proxy-service 17 | port: 18 | number: {{ .Values.trino.proxy.port }} 19 | path: / 20 | pathType: ImplementationSpecific 21 | tls: 22 | - hosts: 23 | - {{ .Values.ingress.proxyHostName }} 24 | secretName: {{ .Values.ingress.proxyHostName }}-tls -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/dao/hibernate/HibernateUserTokenDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.dao.hibernate; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.UserTokenDao; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.dao.common.AbstractHibernateDao; 6 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.UserToken; 7 | import org.springframework.stereotype.Repository; 8 | 
import org.springframework.transaction.annotation.Transactional; 9 | 10 | @Repository 11 | @Transactional 12 | public class HibernateUserTokenDao extends AbstractHibernateDao implements UserTokenDao { 13 | 14 | public HibernateUserTokenDao() { 15 | super(); 16 | setClazz(UserToken.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/dao/hibernate/HibernateComponentsDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.dao.hibernate; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.ComponentsDao; 5 | import com.cloudcheflabs.dataroaster.operators.dataroaster.dao.common.AbstractHibernateDao; 6 | import com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model.Components; 7 | import org.springframework.stereotype.Repository; 8 | import org.springframework.transaction.annotation.Transactional; 9 | 10 | @Repository 11 | @Transactional 12 | public class HibernateComponentsDao extends AbstractHibernateDao implements ComponentsDao { 13 | 14 | public HibernateComponentsDao() { 15 | super(); 16 | setClazz(Components.class); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /operators/dataroaster/chart/templates/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: dataroaster-operator 5 | labels: 6 | app: dataroaster-operator 7 | data: 8 | application.properties: | 9 | spring.profiles.active=@api.spring.profiles.active@ 10 | application-prod.yml: | 11 | # spring boot server 12 | server: 13 | port: {{ .Values.server.port }} 14 | servlet: 15 | context-path: / 16 | # jdbc 17 | jdbc: 18 | driverClassName: com.mysql.jdbc.Driver 
19 | url: {{ template "jdbc.url" . }} 20 | user: {{ .Values.jdbc.user }} 21 | pass: {{ .Values.jdbc.pass }} 22 | # hibernate 23 | hibernate: 24 | dialect: org.hibernate.dialect.MySQL5Dialect 25 | show_sql: {{ .Values.hibernate.show_sql }} 26 | globally_quoted_identifiers: {{ .Values.hibernate.globally_quoted_identifiers }} 27 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/test/java/com/cloudcheflabs/dataroaster/trino/gateway/util/BCryptTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.util; 2 | 3 | import org.junit.Assert; 4 | import org.junit.Test; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | 8 | public class BCryptTestRunner { 9 | 10 | private static Logger LOG = LoggerFactory.getLogger(BCryptTestRunner.class); 11 | 12 | @Test 13 | public void bcrypt() throws Exception { 14 | String password = "mypass123"; 15 | 16 | String bcryptEncodedPassword = BCryptUtils.encodeWithBCrypt(password); 17 | Assert.assertTrue(BCryptUtils.isMatched(password, bcryptEncodedPassword)); 18 | LOG.info("bcryptEncodedPassword: [{}]", bcryptEncodedPassword); 19 | 20 | Assert.assertTrue(!BCryptUtils.isMatched(password + "4", bcryptEncodedPassword)); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/templates/configmap.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: trino-controller 5 | namespace: {{ .Values.namespace }} 6 | labels: 7 | app: trino-controller 8 | data: 9 | application.properties: | 10 | spring.profiles.active=@api.spring.profiles.active@ 11 | application-prod.yml: | 12 | server: 13 | port: {{ .Values.server.port }} 14 | servlet: 15 | context-path: / 16 | trino: 17 | gateway: 18 | 
publicEndpoint: {{ .Values.trino.gateway.publicEndpoint }} 19 | proxyHostName: {{ .Values.trino.gateway.proxyHostName }} 20 | restHostName: {{ .Values.trino.gateway.restHostName }} 21 | storageClass: {{ .Values.trino.gateway.storageClass }} 22 | restUri: {{ .Values.trino.gateway.restUri }} 23 | operator: 24 | restUri: {{ .Values.trino.operator.restUri }} 25 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/dao/KubernetesResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.dao; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.trino.api.dao.ResourceDao; 4 | import io.fabric8.kubernetes.client.KubernetesClient; 5 | import io.fabric8.kubernetes.client.dsl.Resource; 6 | 7 | import java.util.Map; 8 | 9 | public class KubernetesResourceDao extends AbstractKubernetesResourceDao implements ResourceDao { 10 | 11 | public KubernetesResourceDao(KubernetesClient client) { 12 | super(client); 13 | } 14 | 15 | @Override 16 | public Map getSecret(String namespace, String secretName) { 17 | Resource secret = client.secrets().inNamespace(namespace).withName(secretName); 18 | return secret.get().getData(); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/ClusterWithActiveQueryCount.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.model.Cluster; 4 | 5 | import java.io.Serializable; 6 | 7 | public class ClusterWithActiveQueryCount implements Serializable { 8 | 9 | private Cluster cluster; 10 | private TrinoActiveQueryCount trinoActiveQueryCount; 
11 | 12 | 13 | public ClusterWithActiveQueryCount(Cluster cluster, TrinoActiveQueryCount trinoActiveQueryCount) { 14 | this.cluster = cluster; 15 | this.trinoActiveQueryCount = trinoActiveQueryCount; 16 | } 17 | 18 | public Cluster getCluster() { 19 | return cluster; 20 | } 21 | 22 | public TrinoActiveQueryCount getTrinoActiveQueryCount() { 23 | return trinoActiveQueryCount; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/config/SpringContextSingleton.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.config; 2 | 3 | 4 | import org.springframework.context.ApplicationContext; 5 | import org.springframework.context.annotation.AnnotationConfigApplicationContext; 6 | 7 | public class SpringContextSingleton { 8 | 9 | private static ApplicationContext applicationContext; 10 | 11 | private static final Object lock = new Object(); 12 | 13 | public static ApplicationContext getInstance() 14 | { 15 | if(applicationContext == null) { 16 | synchronized(lock) { 17 | if(applicationContext == null) { 18 | applicationContext = new AnnotationConfigApplicationContext(HelmConfig.class); 19 | } 20 | } 21 | } 22 | 23 | return applicationContext; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/dao/kubernetes/KubernetesResourceDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.dao.kubernetes; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.spark.api.dao.ResourceDao; 4 | import io.fabric8.kubernetes.client.KubernetesClient; 5 | import io.fabric8.kubernetes.client.dsl.Resource; 6 | 7 | import java.util.Map; 8 | 9 | 
public class KubernetesResourceDao extends AbstractKubernetesResourceDao implements ResourceDao { 10 | 11 | public KubernetesResourceDao(KubernetesClient client) { 12 | super(client); 13 | } 14 | 15 | @Override 16 | public Map getSecret(String namespace, String secretName) { 17 | Resource secret = client.secrets().inNamespace(namespace).withName(secretName); 18 | return secret.get().getData(); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/config/SpringContextSingleton.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.config; 2 | 3 | 4 | import org.springframework.context.ApplicationContext; 5 | import org.springframework.context.annotation.AnnotationConfigApplicationContext; 6 | 7 | public class SpringContextSingleton { 8 | 9 | private static ApplicationContext applicationContext; 10 | 11 | private static final Object lock = new Object(); 12 | 13 | public static ApplicationContext getInstance() 14 | { 15 | if(applicationContext == null) { 16 | synchronized(lock) { 17 | if(applicationContext == null) { 18 | applicationContext = new AnnotationConfigApplicationContext(APIConfig.class); 19 | } 20 | } 21 | } 22 | 23 | return applicationContext; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /common/src/main/java/com/cloudcheflabs/dataroaster/common/util/TemplateUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.common.util; 2 | 3 | import com.hubspot.jinjava.Jinjava; 4 | 5 | import java.util.Map; 6 | 7 | public class TemplateUtils { 8 | 9 | public static String replace(String templatePath, boolean fromClasspath, Map kv) { 10 | String templateString = FileUtils.fileToString(templatePath, 
fromClasspath); 11 | return replace(templateString, kv); 12 | } 13 | 14 | public static String replace(String templateString, Map kv) { 15 | Jinjava jinjava = new Jinjava(); 16 | return jinjava.render(templateString, kv); 17 | } 18 | 19 | public static void toFile(String templatePath, boolean fromClasspath, Map kv, String targetFilePath, boolean executable) { 20 | FileUtils.stringToFile(replace(templatePath, fromClasspath, kv), targetFilePath, executable); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/java/com/cloudcheflabs/dataroaster/operators/dataroaster/test/SpringBootTestRunnerBase.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.test; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.DataRoasterApplication; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | import org.springframework.boot.SpringApplication; 7 | import org.springframework.context.ConfigurableApplicationContext; 8 | 9 | import java.util.Arrays; 10 | 11 | public class SpringBootTestRunnerBase { 12 | 13 | private static Logger LOG = LoggerFactory.getLogger(SpringBootTestRunnerBase.class); 14 | 15 | protected static ConfigurableApplicationContext applicationContext; 16 | 17 | protected static void init() throws Exception { 18 | applicationContext = 19 | SpringApplication.run(DataRoasterApplication.class, Arrays.asList("").toArray(new String[0])); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/crd/ValueFrom.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.crd; 2 | 3 | public class ValueFrom { 4 | 5 | private SecretKeyRef 
secretKeyRef; 6 | 7 | public SecretKeyRef getSecretKeyRef() { 8 | return secretKeyRef; 9 | } 10 | 11 | public void setSecretKeyRef(SecretKeyRef secretKeyRef) { 12 | this.secretKeyRef = secretKeyRef; 13 | } 14 | 15 | public static class SecretKeyRef { 16 | private String name; 17 | private String key; 18 | 19 | public String getName() { 20 | return name; 21 | } 22 | 23 | public void setName(String name) { 24 | this.name = name; 25 | } 26 | 27 | public String getKey() { 28 | return key; 29 | } 30 | 31 | public void setKey(String key) { 32 | this.key = key; 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/test/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/TokenUtilsTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import org.junit.Test; 4 | import org.slf4j.Logger; 5 | import org.slf4j.LoggerFactory; 6 | 7 | import java.util.Base64; 8 | 9 | public class TokenUtilsTestRunner { 10 | 11 | private static Logger LOG = LoggerFactory.getLogger(TokenUtilsTestRunner.class); 12 | 13 | @Test 14 | public void createNewToken() throws Exception { 15 | String generatedString = RandomUtils.randomText(); 16 | LOG.info("generatedString: [{}]", generatedString); 17 | 18 | String bcrypted = BCryptUtils.encodeWithBCrypt(generatedString); 19 | LOG.info("bcrypted: [{}]", bcrypted); 20 | 21 | String encodedString = Base64.getEncoder().encodeToString(bcrypted.getBytes()); 22 | LOG.info("encodedString: [{}]", encodedString); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/kubernetes/client/KubernetesClientUtils.java: 
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.kubernetes.client; 2 | 3 | import io.fabric8.kubernetes.client.Config; 4 | import io.fabric8.kubernetes.client.DefaultKubernetesClient; 5 | import io.fabric8.kubernetes.client.KubernetesClient; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | 9 | public class KubernetesClientUtils { 10 | 11 | private static Logger LOG = LoggerFactory.getLogger(KubernetesClientUtils.class); 12 | 13 | public static KubernetesClient newClientWithKubeconfig(String contents) { 14 | try { 15 | Config config = Config.fromKubeconfig(contents); 16 | return new DefaultKubernetesClient(config); 17 | } catch (Exception e) { 18 | e.printStackTrace(); 19 | throw new RuntimeException(e); 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8' 4 | 5 | RUN yum install -y tzdata openssl curl ca-certificates fontconfig gzip tar \ 6 | && yum update -y; yum clean all 7 | 8 | 9 | # install jdk. 10 | 11 | RUN set -eux; \ 12 | yum install java-11-openjdk java-11-openjdk-devel -y; 13 | 14 | 15 | # install trino operator. 16 | ENV TRINO_OPERATOR_HOME /opt/trino-operator 17 | ENV TRINO_OPERATOR_USER trino 18 | 19 | RUN useradd -ms /bin/bash -d ${TRINO_OPERATOR_HOME} ${TRINO_OPERATOR_USER} 20 | 21 | # add trino operator jar. 22 | ARG TRINO_OPERATOR_JAR 23 | ADD ${TRINO_OPERATOR_JAR} ${TRINO_OPERATOR_HOME} 24 | 25 | # add trino run shell. 26 | ADD run-trino-operator.sh ${TRINO_OPERATOR_HOME} 27 | 28 | # add permissions. 29 | RUN chmod +x ${TRINO_OPERATOR_HOME}/*.sh 30 | RUN chown ${TRINO_OPERATOR_USER}: -R ${TRINO_OPERATOR_HOME} 31 | 32 | # change work directory. 
33 | USER ${TRINO_OPERATOR_USER} 34 | WORKDIR ${TRINO_OPERATOR_HOME} 35 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/service/ClusterJmxServiceImpl.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.service; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.controller.api.dao.ClusterJmxDao; 4 | import com.cloudcheflabs.dataroaster.trino.controller.api.service.ClusterJmxService; 5 | import com.cloudcheflabs.dataroaster.trino.controller.domain.RestResponse; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.beans.factory.annotation.Qualifier; 8 | import org.springframework.stereotype.Component; 9 | 10 | @Component 11 | public class ClusterJmxServiceImpl implements ClusterJmxService { 12 | 13 | @Autowired 14 | @Qualifier("restClusterJmxDao") 15 | private ClusterJmxDao clusterJmxDao; 16 | 17 | @Override 18 | public RestResponse listClusterJmxEndpoints(String namespace, String restUri) { 19 | return clusterJmxDao.listClusterJmxEndpoints(namespace, restUri); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/component/SimpleHttpClient.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.component; 2 | 3 | 4 | import okhttp3.ConnectionPool; 5 | import okhttp3.OkHttpClient; 6 | 7 | import java.util.concurrent.TimeUnit; 8 | 9 | public class SimpleHttpClient { 10 | 11 | private OkHttpClient client; 12 | 13 | public SimpleHttpClient() 14 | { 15 | this.client = buildClient(); 16 | } 17 | 18 | public OkHttpClient getClient() 19 | { 20 | return this.client; 21 | } 22 | 23 | private 
OkHttpClient buildClient() { 24 | OkHttpClient.Builder builder = new OkHttpClient.Builder() 25 | .connectTimeout(600, TimeUnit.SECONDS) 26 | .readTimeout(600, TimeUnit.SECONDS) 27 | .writeTimeout(600, TimeUnit.SECONDS) 28 | .connectionPool(new ConnectionPool(5, 60, TimeUnit.SECONDS)); 29 | 30 | return builder.build(); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/crd/Autoscaler.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.crd; 2 | 3 | public class Autoscaler { 4 | private int minReplicas; 5 | private int maxReplicas; 6 | private int targetCPUUtilizationPercentage; 7 | 8 | public int getMinReplicas() { 9 | return minReplicas; 10 | } 11 | 12 | public void setMinReplicas(int minReplicas) { 13 | this.minReplicas = minReplicas; 14 | } 15 | 16 | public int getMaxReplicas() { 17 | return maxReplicas; 18 | } 19 | 20 | public void setMaxReplicas(int maxReplicas) { 21 | this.maxReplicas = maxReplicas; 22 | } 23 | 24 | public int getTargetCPUUtilizationPercentage() { 25 | return targetCPUUtilizationPercentage; 26 | } 27 | 28 | public void setTargetCPUUtilizationPercentage(int targetCPUUtilizationPercentage) { 29 | this.targetCPUUtilizationPercentage = targetCPUUtilizationPercentage; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/component/SimpleHttpClient.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.component; 2 | 3 | 4 | import okhttp3.ConnectionPool; 5 | import okhttp3.OkHttpClient; 6 | 7 | import java.util.concurrent.TimeUnit; 8 | 9 | public class SimpleHttpClient { 10 | 11 | 
private OkHttpClient client; 12 | 13 | public SimpleHttpClient() 14 | { 15 | this.client = buildClient(); 16 | } 17 | 18 | public OkHttpClient getClient() 19 | { 20 | return this.client; 21 | } 22 | 23 | private OkHttpClient buildClient() { 24 | OkHttpClient.Builder builder = new OkHttpClient.Builder() 25 | .connectTimeout(600, TimeUnit.SECONDS) 26 | .readTimeout(600, TimeUnit.SECONDS) 27 | .writeTimeout(600, TimeUnit.SECONDS) 28 | .connectionPool(new ConnectionPool(5, 60, TimeUnit.SECONDS)); 29 | 30 | return builder.build(); 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/test/java/com/cloudcheflabs/dataroaster/trino/gateway/SampleTestRunner.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway; 2 | 3 | import org.junit.Test; 4 | import org.junit.runner.RunWith; 5 | import org.springframework.beans.factory.annotation.Autowired; 6 | import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; 7 | import org.springframework.boot.test.context.SpringBootTest; 8 | import org.springframework.test.context.TestPropertySource; 9 | import org.springframework.test.context.junit4.SpringRunner; 10 | import org.springframework.test.web.servlet.MockMvc; 11 | 12 | @RunWith(SpringRunner.class) 13 | @SpringBootTest(classes = TrinoGatewayApplication.class) 14 | @TestPropertySource(locations = "classpath:application-test.properties") 15 | @AutoConfigureMockMvc 16 | public class SampleTestRunner { 17 | 18 | @Autowired 19 | private MockMvc mvc; 20 | 21 | @Test 22 | public void run() throws Exception { 23 | // test something. 
24 | } 25 | } 26 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/test/java/com/cloudcheflabs/dataroaster/trino/gateway/component/SimpleHttpClient.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.component; 2 | 3 | 4 | import okhttp3.ConnectionPool; 5 | import okhttp3.OkHttpClient; 6 | 7 | import java.util.concurrent.TimeUnit; 8 | 9 | public class SimpleHttpClient { 10 | 11 | private OkHttpClient client; 12 | 13 | 14 | public SimpleHttpClient() 15 | { 16 | this.client = buildClient(); 17 | } 18 | 19 | public OkHttpClient getClient() 20 | { 21 | return this.client; 22 | } 23 | 24 | 25 | private OkHttpClient buildClient() { 26 | OkHttpClient.Builder builder = new OkHttpClient.Builder() 27 | .connectTimeout(600, TimeUnit.SECONDS) 28 | .readTimeout(600, TimeUnit.SECONDS) 29 | .writeTimeout(600, TimeUnit.SECONDS) 30 | .connectionPool(new ConnectionPool(5, 60, TimeUnit.SECONDS)); 31 | 32 | return builder.build(); 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/test/java/com/cloudcheflabs/dataroaster/operators/trino/component/SimpleHttpClient.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.component; 2 | 3 | 4 | import okhttp3.ConnectionPool; 5 | import okhttp3.OkHttpClient; 6 | 7 | import java.util.concurrent.TimeUnit; 8 | 9 | public class SimpleHttpClient { 10 | 11 | private OkHttpClient client; 12 | 13 | 14 | public SimpleHttpClient() 15 | { 16 | this.client = buildClient(); 17 | } 18 | 19 | public OkHttpClient getClient() 20 | { 21 | return this.client; 22 | } 23 | 24 | 25 | private OkHttpClient buildClient() { 26 | OkHttpClient.Builder builder = new OkHttpClient.Builder() 27 | .connectTimeout(600, TimeUnit.SECONDS) 28 | 
.readTimeout(600, TimeUnit.SECONDS) 29 | .writeTimeout(600, TimeUnit.SECONDS) 30 | .connectionPool(new ConnectionPool(5, 60, TimeUnit.SECONDS)); 31 | 32 | return builder.build(); 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/component/SpringContextSingleton.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.component; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.trino.gateway.config.KubernetesClientConfigurer; 5 | import org.springframework.context.ApplicationContext; 6 | import org.springframework.context.annotation.AnnotationConfigApplicationContext; 7 | 8 | public class SpringContextSingleton { 9 | 10 | private static ApplicationContext applicationContext; 11 | 12 | private static final Object lock = new Object(); 13 | 14 | public static ApplicationContext getInstance() 15 | { 16 | if(applicationContext == null) { 17 | synchronized(lock) { 18 | if(applicationContext == null) { 19 | applicationContext = new AnnotationConfigApplicationContext(KubernetesClientConfigurer.class); 20 | } 21 | } 22 | } 23 | 24 | return applicationContext; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /components/livy/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM cloudcheflabs/spark:v3.2.2 2 | 3 | 4 | # env. 5 | ENV LIVY_HOME /opt/livy 6 | ENV LIVY_USER livy 7 | ENV KUBECONFIG ${LIVY_HOME}/.kube/config 8 | 9 | 10 | RUN useradd -ms /bin/bash -d ${LIVY_HOME} ${LIVY_USER} 11 | 12 | 13 | # install livy. 
14 | RUN set -eux; \ 15 | apt install -y unzip curl; \ 16 | mkdir -p ${LIVY_HOME}/.kube; \ 17 | cd ${LIVY_HOME}; \ 18 | curl -L -O https://dlcdn.apache.org/incubator/livy/0.7.1-incubating/apache-livy-0.7.1-incubating-bin.zip; \ 19 | unzip apache-livy-0.7.1-incubating-bin.zip; \ 20 | cp -rv apache-livy-0.7.1-incubating-bin/* .; \ 21 | rm -rf apache-livy-0.7.1-incubating-bin/; \ 22 | rm -rf apache-livy-0.7.1-incubating-bin.zip; 23 | 24 | # add run shell. 25 | ADD run-livy.sh ${LIVY_HOME} 26 | 27 | # add kubeconfig. 28 | ADD config ${LIVY_HOME}/.kube 29 | 30 | # add permissions. 31 | RUN chown ${LIVY_USER}: -R ${LIVY_HOME} 32 | 33 | # change work directory. 34 | USER ${LIVY_USER} 35 | RUN chmod +x ${LIVY_HOME}/*.sh 36 | WORKDIR ${LIVY_HOME} 37 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/test/java/com/cloudcheflabs/dataroaster/trino/controller/component/SpringContextSingleton.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.component; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.trino.controller.config.KubernetesClientConfigurer; 5 | import org.springframework.context.ApplicationContext; 6 | import org.springframework.context.annotation.AnnotationConfigApplicationContext; 7 | 8 | public class SpringContextSingleton { 9 | 10 | private static ApplicationContext applicationContext; 11 | 12 | private static final Object lock = new Object(); 13 | 14 | public static ApplicationContext getInstance() 15 | { 16 | if(applicationContext == null) { 17 | synchronized(lock) { 18 | if(applicationContext == null) { 19 | applicationContext = new AnnotationConfigApplicationContext(KubernetesClientConfigurer.class); 20 | } 21 | } 22 | } 23 | 24 | return applicationContext; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- 
/operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/util/KubernetesUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.util; 2 | 3 | import com.cloudcheflabs.dataroaster.common.util.FileUtils; 4 | import com.cloudcheflabs.dataroaster.operators.spark.config.SparkConfiguration; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | 8 | public class KubernetesUtils { 9 | private static Logger LOG = LoggerFactory.getLogger(KubernetesUtils.class); 10 | 11 | public static String getNamespace() { 12 | try { 13 | String namespaceFile = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"; 14 | return FileUtils.fileToString(namespaceFile, false); 15 | } catch (Exception e) { 16 | e.printStackTrace(); 17 | LOG.warn("instead return default spark operator namespace [{}]", SparkConfiguration.DEFAULT_SPARK_OPERATOR_NAMESPACE); 18 | return SparkConfiguration.DEFAULT_SPARK_OPERATOR_NAMESPACE; 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/util/KubernetesUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.util; 2 | 3 | import com.cloudcheflabs.dataroaster.common.util.FileUtils; 4 | import com.cloudcheflabs.dataroaster.operators.trino.config.TrinoConfiguration; 5 | import org.slf4j.Logger; 6 | import org.slf4j.LoggerFactory; 7 | 8 | public class KubernetesUtils { 9 | private static Logger LOG = LoggerFactory.getLogger(KubernetesUtils.class); 10 | 11 | public static String getNamespace() { 12 | try { 13 | String namespaceFile = "/var/run/secrets/kubernetes.io/serviceaccount/namespace"; 14 | return FileUtils.fileToString(namespaceFile, false); 15 | } catch (Exception e) { 16 | 
e.printStackTrace(); 17 | LOG.warn("instead return default trino operator namespace [{}]", TrinoConfiguration.DEFAULT_TRINO_OPERATOR_NAMESPACE); 18 | return TrinoConfiguration.DEFAULT_TRINO_OPERATOR_NAMESPACE; 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8' 4 | 5 | RUN yum install -y tzdata openssl curl ca-certificates fontconfig gzip tar \ 6 | && yum update -y; yum clean all 7 | 8 | 9 | # install jdk. 10 | 11 | RUN set -eux; \ 12 | yum install java-11-openjdk java-11-openjdk-devel -y; 13 | 14 | 15 | # install trino controller. 16 | ENV TRINO_CONTROLLER_HOME /opt/trino-controller 17 | ENV TRINO_CONTROLLER_USER trino 18 | 19 | RUN useradd -ms /bin/bash -d ${TRINO_CONTROLLER_HOME} ${TRINO_CONTROLLER_USER} 20 | 21 | # add trino controller jar. 22 | ARG TRINO_CONTROLLER_JAR 23 | ADD ${TRINO_CONTROLLER_JAR} ${TRINO_CONTROLLER_HOME} 24 | 25 | # add trino run shell. 26 | ADD run-trino-controller.sh ${TRINO_CONTROLLER_HOME} 27 | 28 | # add permissions. 29 | RUN chmod +x ${TRINO_CONTROLLER_HOME}/*.sh 30 | RUN chown ${TRINO_CONTROLLER_USER}: -R ${TRINO_CONTROLLER_HOME} 31 | 32 | # change work directory. 
33 | USER ${TRINO_CONTROLLER_USER} 34 | WORKDIR ${TRINO_CONTROLLER_HOME} 35 | -------------------------------------------------------------------------------- /operators/helm/chart/templates/helm-charts.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apiextensions.k8s.io/v1 2 | kind: CustomResourceDefinition 3 | metadata: 4 | name: helmcharts.helm-operator.cloudchef-labs.com 5 | spec: 6 | group: helm-operator.cloudchef-labs.com 7 | names: 8 | plural: helmcharts 9 | singular: helmchart 10 | kind: HelmChart 11 | shortNames: 12 | - helmchart 13 | scope: Namespaced 14 | versions: 15 | - name: v1beta1 16 | served: true 17 | storage: true 18 | schema: 19 | openAPIV3Schema: 20 | type: object 21 | properties: 22 | spec: 23 | type: object 24 | properties: 25 | repo: 26 | type: string 27 | chartName: 28 | type: string 29 | name: 30 | type: string 31 | version: 32 | type: string 33 | namespace: 34 | type: string 35 | values: 36 | type: string 37 | 38 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/TrinoActiveQueryCount.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | import java.io.Serializable; 4 | 5 | public class TrinoActiveQueryCount implements Serializable { 6 | 7 | private String clusterName; 8 | private String coordinatorAddress; 9 | private int count; 10 | 11 | public TrinoActiveQueryCount() {} 12 | 13 | public String getClusterName() { 14 | return clusterName; 15 | } 16 | 17 | public void setClusterName(String clusterName) { 18 | this.clusterName = clusterName; 19 | } 20 | 21 | public String getCoordinatorAddress() { 22 | return coordinatorAddress; 23 | } 24 | 25 | public void setCoordinatorAddress(String coordinatorAddress) { 26 | this.coordinatorAddress = coordinatorAddress; 
27 | } 28 | 29 | public int getCount() { 30 | return count; 31 | } 32 | 33 | public void setCount(int count) { 34 | this.count = count; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/resources/application-dev.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8099 4 | servlet: 5 | context-path: / 6 | # trino proxy 7 | trino: 8 | proxy: 9 | port: 18080 10 | authentication: false 11 | tls: 12 | enabled: false 13 | keystorePath: /home/opc/keystore.jks 14 | keystorePass: changeit 15 | trustStorePath: /home/opc/.keystore 16 | trustStorePass: changeit 17 | threadPool: 18 | maxThreads: 100 19 | minThreads: 10 20 | idleTimeout: 120 21 | publicEndpoint: "https://trino-gateway-proxy-test.cloudchef-labs.com" 22 | operator: 23 | url: http://localhost:8092 24 | # jdbc 25 | jdbc: 26 | driverClassName: com.mysql.jdbc.Driver 27 | url: jdbc:mysql://localhost:3306/trino_proxy?useSSL=false 28 | user: trino 29 | pass: Trino123! 
30 | 31 | # hibernate 32 | hibernate: 33 | dialect: org.hibernate.dialect.MySQL5Dialect 34 | show_sql: true 35 | globally_quoted_identifiers: true 36 | # redis 37 | redis: 38 | host: localhost 39 | port: 6379 40 | 41 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/component/SpringContextSingleton.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.component; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.dataroaster.config.KubernetesClientConfigurer; 5 | import org.springframework.context.ApplicationContext; 6 | import org.springframework.context.annotation.AnnotationConfigApplicationContext; 7 | 8 | public class SpringContextSingleton { 9 | 10 | private static ApplicationContext applicationContext; 11 | 12 | private static final Object lock = new Object(); 13 | 14 | public static ApplicationContext getInstance() 15 | { 16 | if(applicationContext == null) { 17 | synchronized(lock) { 18 | if(applicationContext == null) { 19 | applicationContext = new AnnotationConfigApplicationContext(KubernetesClientConfigurer.class); 20 | } 21 | } 22 | } 23 | 24 | return applicationContext; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/config/FilterConfigurer.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.config; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.filter.AuthorizationFilter; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.boot.web.servlet.FilterRegistrationBean; 6 | import org.springframework.context.annotation.Bean; 7 | 
import org.springframework.context.annotation.Configuration; 8 | 9 | @Configuration 10 | public class FilterConfigurer { 11 | 12 | @Autowired 13 | private AuthorizationFilter authorizationFilter; 14 | 15 | @Bean 16 | public FilterRegistrationBean authorizationFilterFilterRegistrationBean(){ 17 | FilterRegistrationBean registrationBean 18 | = new FilterRegistrationBean<>(); 19 | 20 | registrationBean.setFilter(authorizationFilter); 21 | registrationBean.addUrlPatterns("/*"); 22 | 23 | return registrationBean; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/resources/application-prod.yml: -------------------------------------------------------------------------------- 1 | # spring boot server 2 | server: 3 | port: 8099 4 | servlet: 5 | context-path: / 6 | # trino proxy 7 | trino: 8 | proxy: 9 | port: 18080 10 | authentication: false 11 | tls: 12 | enabled: false 13 | keystorePath: /home/opc/keystore.jks 14 | keystorePass: changeit 15 | trustStorePath: /home/opc/.keystore 16 | trustStorePass: changeit 17 | threadPool: 18 | maxThreads: 100 19 | minThreads: 10 20 | idleTimeout: 120 21 | publicEndpoint: "https://trino-gateway-proxy-test.cloudchef-labs.com" 22 | operator: 23 | url: http://trino-operator-service.trino-operator.svc:8092 24 | # jdbc 25 | jdbc: 26 | driverClassName: com.mysql.jdbc.Driver 27 | url: jdbc:mysql://localhost:3306/trino_proxy?useSSL=false 28 | user: trino 29 | pass: Trino123! 
30 | 31 | # hibernate 32 | hibernate: 33 | dialect: org.hibernate.dialect.MySQL5Dialect 34 | show_sql: false 35 | globally_quoted_identifiers: true 36 | # redis 37 | redis: 38 | host: localhost 39 | port: 6379 -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/RestResponse.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | import java.io.Serializable; 4 | 5 | 6 | public class RestResponse implements Serializable { 7 | 8 | public static final int STATUS_OK = 200; 9 | 10 | private int statusCode; 11 | 12 | private String errorMessage; 13 | 14 | private String successMessage; 15 | 16 | 17 | public int getStatusCode() { 18 | return statusCode; 19 | } 20 | 21 | public void setStatusCode(int statusCode) { 22 | this.statusCode = statusCode; 23 | } 24 | 25 | public String getErrorMessage() { 26 | return errorMessage; 27 | } 28 | 29 | public void setErrorMessage(String errorMessage) { 30 | this.errorMessage = errorMessage; 31 | } 32 | 33 | public String getSuccessMessage() { 34 | return successMessage; 35 | } 36 | 37 | public void setSuccessMessage(String successMessage) { 38 | this.successMessage = successMessage; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/domain/CustomResource.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.domain; 2 | 3 | import java.io.Serializable; 4 | 5 | public class CustomResource implements Serializable { 6 | 7 | private String kind; 8 | private String name; 9 | private String namespace; 10 | private String yaml; 11 | 12 | public String getKind() { 13 | return kind; 14 | } 15 | 
16 | public void setKind(String kind) { 17 | this.kind = kind; 18 | } 19 | 20 | public String getName() { 21 | return name; 22 | } 23 | 24 | public void setName(String name) { 25 | this.name = name; 26 | } 27 | 28 | public String getNamespace() { 29 | return namespace; 30 | } 31 | 32 | public void setNamespace(String namespace) { 33 | this.namespace = namespace; 34 | } 35 | 36 | public String getYaml() { 37 | return yaml; 38 | } 39 | 40 | public void setYaml(String yaml) { 41 | this.yaml = yaml; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/domain/CustomResource.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.domain; 2 | 3 | import java.io.Serializable; 4 | 5 | public class CustomResource implements Serializable { 6 | 7 | private String kind; 8 | private String name; 9 | private String namespace; 10 | private String yaml; 11 | 12 | public String getKind() { 13 | return kind; 14 | } 15 | 16 | public void setKind(String kind) { 17 | this.kind = kind; 18 | } 19 | 20 | public String getName() { 21 | return name; 22 | } 23 | 24 | public void setName(String name) { 25 | this.name = name; 26 | } 27 | 28 | public String getNamespace() { 29 | return namespace; 30 | } 31 | 32 | public void setNamespace(String namespace) { 33 | this.namespace = namespace; 34 | } 35 | 36 | public String getYaml() { 37 | return yaml; 38 | } 39 | 40 | public void setYaml(String yaml) { 41 | this.yaml = yaml; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/domain/RestResponse.java: -------------------------------------------------------------------------------- 1 | package 
com.cloudcheflabs.dataroaster.trino.controller.domain; 2 | 3 | import java.io.Serializable; 4 | 5 | 6 | public class RestResponse implements Serializable { 7 | 8 | public static final int STATUS_OK = 200; 9 | 10 | private int statusCode; 11 | 12 | private String errorMessage; 13 | 14 | private String successMessage; 15 | 16 | 17 | public int getStatusCode() { 18 | return statusCode; 19 | } 20 | 21 | public void setStatusCode(int statusCode) { 22 | this.statusCode = statusCode; 23 | } 24 | 25 | public String getErrorMessage() { 26 | return errorMessage; 27 | } 28 | 29 | public void setErrorMessage(String errorMessage) { 30 | this.errorMessage = errorMessage; 31 | } 32 | 33 | public String getSuccessMessage() { 34 | return successMessage; 35 | } 36 | 37 | public void setSuccessMessage(String successMessage) { 38 | this.successMessage = successMessage; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/config/APIConfig.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.config; 2 | 3 | 4 | import com.cloudcheflabs.dataroaster.operators.spark.api.dao.ResourceDao; 5 | import com.cloudcheflabs.dataroaster.operators.spark.dao.kubernetes.KubernetesResourceDao; 6 | import com.cloudcheflabs.dataroaster.operators.spark.handler.SparkApplicationClient; 7 | import io.fabric8.kubernetes.client.DefaultKubernetesClient; 8 | import io.fabric8.kubernetes.client.KubernetesClient; 9 | import org.springframework.context.annotation.Bean; 10 | import org.springframework.context.annotation.Configuration; 11 | 12 | @Configuration 13 | public class APIConfig { 14 | @Bean 15 | public ResourceDao resourceDao() { return new KubernetesResourceDao(kubernetesClient()); } 16 | 17 | @Bean 18 | public SparkApplicationClient sparkApplicationClient() { return new 
SparkApplicationClient(kubernetesClient()); } 19 | 20 | @Bean 21 | public KubernetesClient kubernetesClient() { return new DefaultKubernetesClient(); } 22 | 23 | } 24 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/config/FilterConfigurer.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.config; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.dataroaster.filter.AuthorizationFilter; 4 | import org.springframework.beans.factory.annotation.Autowired; 5 | import org.springframework.boot.web.servlet.FilterRegistrationBean; 6 | import org.springframework.context.annotation.Bean; 7 | import org.springframework.context.annotation.Configuration; 8 | 9 | @Configuration 10 | public class FilterConfigurer { 11 | 12 | @Autowired 13 | private AuthorizationFilter authorizationFilter; 14 | 15 | @Bean 16 | public FilterRegistrationBean authorizationFilterFilterRegistrationBean(){ 17 | FilterRegistrationBean registrationBean 18 | = new FilterRegistrationBean<>(); 19 | 20 | registrationBean.setFilter(authorizationFilter); 21 | registrationBean.addUrlPatterns("/*"); 22 | 23 | return registrationBean; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/handler/HelmChartClient.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.handler; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.helm.crd.HelmChart; 4 | import io.fabric8.kubernetes.api.model.KubernetesResourceList; 5 | import io.fabric8.kubernetes.client.KubernetesClient; 6 | import io.fabric8.kubernetes.client.dsl.MixedOperation; 7 | import 
io.fabric8.kubernetes.client.dsl.Resource; 8 | 9 | public class HelmChartClient { 10 | private KubernetesClient client; 11 | private MixedOperation, Resource> helmChartClient; 12 | 13 | public HelmChartClient(KubernetesClient client) { 14 | this.client = client; 15 | helmChartClient = client.resources(HelmChart.class); 16 | } 17 | 18 | public KubernetesClient getClient() { 19 | return client; 20 | } 21 | 22 | public MixedOperation, Resource> getHelmChartClient() { 23 | return helmChartClient; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /operators/trino/chart/templates/rbac.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ .Values.serviceAccount }} 6 | namespace: {{ .Values.namespace }} 7 | --- 8 | apiVersion: rbac.authorization.k8s.io/v1 9 | kind: ClusterRole 10 | metadata: 11 | annotations: 12 | rbac.authorization.kubernetes.io/autoupdate: "true" 13 | labels: 14 | kubernetes.io/bootstrapping: rbac-defaults 15 | name: trino-operator 16 | rules: 17 | - apiGroups: 18 | - '*' 19 | resources: 20 | - '*' 21 | verbs: 22 | - '*' 23 | - nonResourceURLs: 24 | - '*' 25 | verbs: 26 | - '*' 27 | --- 28 | apiVersion: rbac.authorization.k8s.io/v1 29 | kind: ClusterRoleBinding 30 | metadata: 31 | annotations: 32 | rbac.authorization.kubernetes.io/autoupdate: "true" 33 | labels: 34 | kubernetes.io/bootstrapping: rbac-defaults 35 | name: trino-operator 36 | roleRef: 37 | apiGroup: rbac.authorization.k8s.io 38 | kind: ClusterRole 39 | name: trino-operator 40 | subjects: 41 | - kind: ServiceAccount 42 | name: {{ .Values.serviceAccount }} 43 | namespace: {{ .Values.namespace }} -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/domain/model/Components.java: 
package com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model;

import com.google.common.collect.Sets;

import javax.persistence.*;
import java.io.Serializable;
import java.util.Set;

/**
 * JPA entity mapped to the {@code components} table. A component is identified
 * by its name and owns a collection of custom-resource rows via the inverse
 * side of a one-to-many association.
 */
@Entity
@Table(name = "components")
public class Components implements Serializable {

    // Natural primary key: the component name (column comp_name).
    @Id
    @Column(name = "comp_name")
    private String compName;

    // Inverse (non-owning) side; the owning side is the "components" field of
    // the custom-resource entity. EAGER fetch loads the whole set on every load
    // of this entity.
    // NOTE(review): the element type parameter appears to have been lost in
    // extraction (raw Set) - confirm against the original source.
    @OneToMany(mappedBy = "components", fetch = FetchType.EAGER)
    private Set customResourceSet = Sets.newHashSet();

    public String getCompName() {
        return compName;
    }

    public void setCompName(String compName) {
        this.compName = compName;
    }

    public Set getCustomResourceSet() {
        return customResourceSet;
    }

    public void setCustomResourceSet(Set customResourceSet) {
        this.customResourceSet = customResourceSet;
    }
}
name: helm-operator 39 | roleRef: 40 | apiGroup: rbac.authorization.k8s.io 41 | kind: ClusterRole 42 | name: helm-operator 43 | subjects: 44 | - kind: ServiceAccount 45 | name: {{ .Values.serviceAccount }} 46 | namespace: {{ .Release.Namespace }} -------------------------------------------------------------------------------- /operators/spark/chart/templates/rbac.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ .Values.serviceAccount }} 6 | namespace: {{ .Values.namespace }} 7 | 8 | 9 | --- 10 | apiVersion: rbac.authorization.k8s.io/v1 11 | kind: ClusterRole 12 | metadata: 13 | annotations: 14 | rbac.authorization.kubernetes.io/autoupdate: "true" 15 | labels: 16 | kubernetes.io/bootstrapping: rbac-defaults 17 | name: spark-operator 18 | rules: 19 | - apiGroups: 20 | - '*' 21 | resources: 22 | - '*' 23 | verbs: 24 | - '*' 25 | - nonResourceURLs: 26 | - '*' 27 | verbs: 28 | - '*' 29 | 30 | --- 31 | apiVersion: rbac.authorization.k8s.io/v1 32 | kind: ClusterRoleBinding 33 | metadata: 34 | annotations: 35 | rbac.authorization.kubernetes.io/autoupdate: "true" 36 | labels: 37 | kubernetes.io/bootstrapping: rbac-defaults 38 | name: spark-operator 39 | roleRef: 40 | apiGroup: rbac.authorization.k8s.io 41 | kind: ClusterRole 42 | name: spark-operator 43 | subjects: 44 | - kind: ServiceAccount 45 | name: {{ .Values.serviceAccount }} 46 | namespace: {{ .Values.namespace }} -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: trino-controller 2 | serviceAccount: trino-controller 3 | image: cloudcheflabs/trino-controller:v1.2.0 4 | imagePullPolicy: Always 5 | replicas: 1 6 | resources: 7 | requests: 8 | cpu: 200m 9 | memory: 1Gi 10 | limits: 11 | cpu: 300m 12 | memory: 1500Mi 13 | 
priorityClassName: "" 14 | annotations: null 15 | affinity: null 16 | tolerations: null 17 | nodeSelector: null 18 | hostNetwork: false 19 | dnsPolicy: ClusterFirst 20 | 21 | # helm operator. 22 | dataroasterhelmoperator: 23 | dnsPolicy: ClusterFirst 24 | 25 | # trino operator. 26 | dataroastertrinooperator: 27 | dnsPolicy: ClusterFirst 28 | 29 | 30 | # spring boot server 31 | server: 32 | port: 8093 33 | trino: 34 | operator: 35 | namespace: trino-operator 36 | restUri: "http://trino-operator-service.trino-operator.svc:8092" 37 | gateway: 38 | publicEndpoint: "https://trino-gw-proxy.example.com" 39 | proxyHostName: "trino-gw-proxy.example.com" 40 | restHostName: "trino-gw-rest.example.com" 41 | storageClass: "oci" 42 | restUri: "http://trino-gateway-rest-service.trino-gateway.svc:8099" 43 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/util/HttpUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.util; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import java.io.BufferedInputStream; 7 | import java.io.FileOutputStream; 8 | import java.io.IOException; 9 | import java.net.URL; 10 | 11 | public class HttpUtils { 12 | 13 | private static Logger LOG = LoggerFactory.getLogger(HttpUtils.class); 14 | 15 | public static void downloadFileFromHttpRest(String urlPath, String toFilePath) { 16 | try (BufferedInputStream in = new BufferedInputStream(new URL(urlPath).openStream()); 17 | FileOutputStream fileOutputStream = new FileOutputStream(toFilePath)) { 18 | byte dataBuffer[] = new byte[1024]; 19 | int bytesRead; 20 | while ((bytesRead = in.read(dataBuffer, 0, 1024)) != -1) { 21 | fileOutputStream.write(dataBuffer, 0, bytesRead); 22 | } 23 | } catch (IOException e) { 24 | e.printStackTrace(); 25 | LOG.error(e.getMessage()); 
26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/handler/TrinoClusterClient.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.handler; 2 | 3 | import com.cloudcheflabs.dataroaster.operators.trino.crd.TrinoCluster; 4 | import io.fabric8.kubernetes.api.model.KubernetesResourceList; 5 | import io.fabric8.kubernetes.client.KubernetesClient; 6 | import io.fabric8.kubernetes.client.dsl.MixedOperation; 7 | import io.fabric8.kubernetes.client.dsl.Resource; 8 | 9 | public class TrinoClusterClient { 10 | private KubernetesClient client; 11 | private MixedOperation, Resource> trinoClusterClient; 12 | 13 | public TrinoClusterClient(KubernetesClient client) { 14 | this.client = client; 15 | trinoClusterClient = client.resources(TrinoCluster.class); 16 | } 17 | 18 | public KubernetesClient getClient() { 19 | return client; 20 | } 21 | 22 | public MixedOperation, Resource> getTrinoClusterClient() { 23 | return trinoClusterClient; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/cache/TrinoResponseRedisCache.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.cache; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.CacheDao; 4 | import com.cloudcheflabs.dataroaster.trino.gateway.api.service.CacheService; 5 | import com.cloudcheflabs.dataroaster.trino.gateway.domain.TrinoResponse; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.beans.factory.annotation.Qualifier; 8 | import org.springframework.stereotype.Component; 9 | 10 | 11 | @Component 12 | 
package com.cloudcheflabs.dataroaster.trino.gateway.cache;

import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.CacheDao;
import com.cloudcheflabs.dataroaster.trino.gateway.api.service.CacheService;
import com.cloudcheflabs.dataroaster.trino.gateway.domain.TrinoResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;


/**
 * Redis-backed {@link CacheService} for {@link TrinoResponse} objects.
 * Pure delegation: every operation is forwarded to the injected DAO.
 * NOTE(review): generic type parameters on CacheService / CacheDao / Class look
 * stripped by extraction (raw types here) - confirm against the original source.
 */
@Component
public class TrinoResponseRedisCache implements CacheService {

    // Redis implementation selected by qualifier; other CacheDao beans may exist.
    @Autowired
    @Qualifier("redisTrinoResponseCacheDao")
    private CacheDao trinoResponseCacheDao;


    /** Stores trinoResponse in the cache under the given id. */
    @Override
    public void set(String id, TrinoResponse trinoResponse) {
        trinoResponseCacheDao.set(id, trinoResponse);
    }

    /**
     * Looks up the cached response for id. Miss semantics (null vs exception)
     * are defined by the underlying DAO - can't tell from here.
     */
    @Override
    public TrinoResponse get(String id, Class clazz) {
        return trinoResponseCacheDao.get(id, clazz);
    }
}
-------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain; 2 | 3 | import java.io.Serializable; 4 | 5 | public class TrinoResponse implements Serializable { 6 | private String id; 7 | private String nextUri; 8 | 9 | private String infoUri; 10 | 11 | private String partialCancelUri; 12 | 13 | public String getPartialCancelUri() { 14 | return partialCancelUri; 15 | } 16 | 17 | public void setPartialCancelUri(String partialCancelUri) { 18 | this.partialCancelUri = partialCancelUri; 19 | } 20 | 21 | public String getInfoUri() { 22 | return infoUri; 23 | } 24 | 25 | public void setInfoUri(String infoUri) { 26 | this.infoUri = infoUri; 27 | } 28 | 29 | public String getId() { 30 | return id; 31 | } 32 | 33 | public void setId(String id) { 34 | this.id = id; 35 | } 36 | 37 | public String getNextUri() { 38 | return nextUri; 39 | } 40 | 41 | public void setNextUri(String nextUri) { 42 | this.nextUri = nextUri; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /components/metabase/chart/values.yaml: -------------------------------------------------------------------------------- 1 | namespace: metabase 2 | serviceAccount: metabase 3 | image: cloudcheflabs/metabase:v0.44.3 4 | imagePullPolicy: Always 5 | replicas: 1 6 | resources: 7 | requests: 8 | cpu: 200m 9 | memory: 1Gi 10 | limits: 11 | cpu: 1000m 12 | memory: 5Gi 13 | priorityClassName: "" 14 | annotations: null 15 | affinity: {} 16 | tolerations: {} 17 | nodeSelector: {} 18 | hostNetwork: false 19 | dnsPolicy: ClusterFirst 20 | 21 | server: 22 | port: 3000 23 | 24 | db: 25 | type: mysql 26 | name: metabase 27 | port: 3306 28 | user: root 29 | password: mysqlpass123 30 | host: mysql-service.metabase.svc 31 | connectionUri: jdbc:mysql://mysql-service.metabase.svc:3306/metabase?useSSL=false&createDatabaseIfNotExist=true 32 | 33 | 34 | # ingress. 
35 | ingress: 36 | enabled: false 37 | ingressClassName: nginx 38 | hostName: dataroaster-metabase.cloudchef-labs.com 39 | 40 | # cert-manager. 41 | certManager: 42 | clusterIssue: letsencrypt-prod 43 | 44 | # readiness probe. 45 | readiness: 46 | initialDelaySeconds: 15 47 | periodSeconds: 10 48 | 49 | # liveness probe. 50 | liveness: 51 | initialDelaySeconds: 3 52 | periodSeconds: 10 -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/util/YamlUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.util; 2 | 3 | import com.cloudcheflabs.dataroaster.common.util.JsonUtils; 4 | import com.fasterxml.jackson.databind.JsonNode; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; 7 | 8 | import java.io.IOException; 9 | 10 | public class YamlUtils { 11 | 12 | public static String jsonToYaml(String json) { 13 | try { 14 | JsonNode jsonNodeTree = new ObjectMapper().readTree(json); 15 | return new YAMLMapper().writeValueAsString(jsonNodeTree); 16 | } catch (IOException e) { 17 | throw new RuntimeException(e); 18 | } 19 | } 20 | 21 | public static String objectToYaml(Object obj) { 22 | try { 23 | JsonNode jsonNodeTree = new ObjectMapper().readTree(JsonUtils.toJson(new ObjectMapper(), obj)); 24 | return new YAMLMapper().writeValueAsString(jsonNodeTree); 25 | } catch (IOException e) { 26 | throw new RuntimeException(e); 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/main/java/com/cloudcheflabs/dataroaster/trino/controller/api/dao/RegisterClusterDao.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.controller.api.dao; 2 | 3 | 
package com.cloudcheflabs.dataroaster.trino.controller.api.dao;

import com.cloudcheflabs.dataroaster.trino.controller.domain.RestResponse;

/**
 * DAO abstraction over the trino-gateway REST API for managing cluster groups,
 * registered Trino clusters and gateway users. Every call targets the gateway
 * instance addressed by {@code restUri} and returns the gateway's raw
 * {@link RestResponse} (status code plus success/error message).
 */
public interface RegisterClusterDao {
    // Cluster-group management.
    RestResponse createClusterGroup(String restUri, String groupName);
    RestResponse listClusterGroup(String restUri);
    RestResponse deleteClusterGroup(String restUri, String groupName);

    // Cluster registration; 'activated' toggles whether the cluster receives traffic.
    RestResponse registerCluster(String restUri, String clusterName, String clusterType, String url, boolean activated, String groupName);
    RestResponse updateClusterActivated(String restUri, String clusterName, boolean activated);
    RestResponse listClusters(String restUri);
    RestResponse deregisterCluster(String restUri, String clusterName);

    // Gateway user management within a cluster group.
    RestResponse createUser(String restUri, String user, String password, String groupName);
    RestResponse updatePassword(String restUri, String user, String password);
    RestResponse listUsers(String restUri);
    RestResponse deleteUser(String restUri, String user);
}
ObjectMapper().readTree(JsonUtils.toJson(new ObjectMapper(), obj)); 24 | return new YAMLMapper().writeValueAsString(jsonNodeTree); 25 | } catch (IOException e) { 26 | throw new RuntimeException(e); 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /components/metabase/chart/templates/rbac.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ .Values.serviceAccount }} 6 | labels: 7 | app.kubernetes.io/name: metabase 8 | namespace: {{ .Values.namespace }} 9 | 10 | --- 11 | apiVersion: rbac.authorization.k8s.io/v1 12 | kind: ClusterRole 13 | metadata: 14 | annotations: 15 | rbac.authorization.kubernetes.io/autoupdate: "true" 16 | labels: 17 | kubernetes.io/bootstrapping: rbac-defaults 18 | name: metabase 19 | rules: 20 | - apiGroups: 21 | - '*' 22 | resources: 23 | - '*' 24 | verbs: 25 | - '*' 26 | - nonResourceURLs: 27 | - '*' 28 | verbs: 29 | - '*' 30 | 31 | 32 | --- 33 | apiVersion: rbac.authorization.k8s.io/v1 34 | kind: ClusterRoleBinding 35 | metadata: 36 | annotations: 37 | rbac.authorization.kubernetes.io/autoupdate: "true" 38 | labels: 39 | kubernetes.io/bootstrapping: rbac-defaults 40 | name: metabase 41 | roleRef: 42 | apiGroup: rbac.authorization.k8s.io 43 | kind: ClusterRole 44 | name: metabase 45 | subjects: 46 | - kind: ServiceAccount 47 | name: {{ .Values.serviceAccount }} 48 | namespace: {{ .Values.namespace }} -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/domain/model/Users.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.domain.model; 2 | 3 | import javax.persistence.*; 4 | import java.io.Serializable; 5 | 6 | @Entity 7 | @Table(name = "users") 8 | public class Users 
package com.cloudcheflabs.dataroaster.trino.gateway.domain.model;

import javax.persistence.*;
import java.io.Serializable;

/**
 * JPA entity mapped to the {@code users} table: a gateway user with a password,
 * belonging to exactly one cluster group.
 */
@Entity
@Table(name = "users")
public class Users implements Serializable {

    // Natural primary key: the user name (column "user" - a reserved word in
    // some databases; relies on identifier quoting being enabled).
    @Id
    @Column(name = "user")
    private String user;

    // NOTE(review): stored as a plain string here; can't tell from this file
    // whether it is hashed before persisting - confirm upstream.
    @Column(name = "password")
    private String password;

    // Owning side of the many-to-one association to the user's cluster group,
    // joined on the group_name foreign key (default EAGER fetch for @ManyToOne).
    @ManyToOne
    @JoinColumn(name ="group_name")
    private ClusterGroup clusterGroup;



    public String getUser() {
        return user;
    }

    public void setUser(String user) {
        this.user = user;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public ClusterGroup getClusterGroup() {
        return clusterGroup;
    }

    public void setClusterGroup(ClusterGroup clusterGroup) {
        this.clusterGroup = clusterGroup;
    }
}
password); 17 | RestResponse listUsers(String restUri); 18 | RestResponse deleteUser(String restUri, String user); 19 | } 20 | -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/YamlUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import com.cloudcheflabs.dataroaster.common.util.JsonUtils; 4 | import com.fasterxml.jackson.databind.JsonNode; 5 | import com.fasterxml.jackson.databind.ObjectMapper; 6 | import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; 7 | 8 | import java.io.IOException; 9 | 10 | public class YamlUtils { 11 | 12 | public static String jsonToYaml(String json) { 13 | try { 14 | JsonNode jsonNodeTree = new ObjectMapper().readTree(json); 15 | return new YAMLMapper().writeValueAsString(jsonNodeTree); 16 | } catch (IOException e) { 17 | throw new RuntimeException(e); 18 | } 19 | } 20 | 21 | public static String objectToYaml(Object obj) { 22 | try { 23 | JsonNode jsonNodeTree = new ObjectMapper().readTree(JsonUtils.toJson(new ObjectMapper(), obj)); 24 | return new YAMLMapper().writeValueAsString(jsonNodeTree); 25 | } catch (IOException e) { 26 | throw new RuntimeException(e); 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/spark/util/SparkApplicationExecutor.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.spark.util; 2 | 3 | import org.slf4j.Logger; 4 | import org.slf4j.LoggerFactory; 5 | 6 | import java.util.Arrays; 7 | import java.util.Date; 8 | import java.util.List; 9 | import java.util.concurrent.*; 10 | 11 | public class SparkApplicationExecutor { 12 | 
13 | private static Logger LOG = LoggerFactory.getLogger(SparkApplicationExecutor.class); 14 | 15 | public static void runTask(Callable task) { 16 | ExecutorService executor = Executors.newFixedThreadPool(5); 17 | try { 18 | // execute task with timeout. 19 | List> futureList = 20 | executor.invokeAll(Arrays.asList(task), 15, TimeUnit.MINUTES); 21 | 22 | for (Future fut : futureList) { 23 | LOG.info(new Date() + "::" + fut.get()); 24 | } 25 | } catch (InterruptedException | ExecutionException e) { 26 | LOG.error(e.getMessage()); 27 | throw new RuntimeException(e); 28 | } 29 | 30 | executor.shutdown(); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /operators/spark/spark-operator/src/test/resources/cr/pi-py.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: "spark-operator.cloudchef-labs.com/v1alpha1" 2 | kind: SparkApplication 3 | metadata: 4 | name: pi-py 5 | namespace: spark-operator 6 | spec: 7 | core: 8 | applicationType: Batch 9 | deployMode: Client 10 | container: 11 | image: "cloudcheflabs/spark-py:v3.0.3" 12 | imagePullPolicy: Always 13 | applicationFileUrl: "s3a://mykidong/spark-app/pi.py" 14 | namespace: spark 15 | s3: 16 | bucket: mykidong 17 | accessKey: 18 | valueFrom: 19 | secretKeyRef: 20 | name: s3-secret 21 | key: accessKey 22 | secretKey: 23 | valueFrom: 24 | secretKeyRef: 25 | name: s3-secret 26 | key: secretKey 27 | endpoint: "https://any-s3-endpoint" 28 | driver: 29 | serviceAccountName: spark 30 | label: 31 | application-name: pi-py 32 | resources: 33 | cores: "1" 34 | limitCores: "1200m" 35 | memory: "512m" 36 | executor: 37 | instances: 1 38 | label: 39 | application-name: pi-py 40 | resources: 41 | cores: "1" 42 | limitCores: "1200m" 43 | memory: "1g" 44 | 45 | -------------------------------------------------------------------------------- 
/operators/trino/trino-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/trino/crd/Resources.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.trino.crd; 2 | 3 | public class Resources { 4 | private Requests requests; 5 | private Limits limits; 6 | 7 | public Requests getRequests() { 8 | return requests; 9 | } 10 | 11 | public void setRequests(Requests requests) { 12 | this.requests = requests; 13 | } 14 | 15 | public Limits getLimits() { 16 | return limits; 17 | } 18 | 19 | public void setLimits(Limits limits) { 20 | this.limits = limits; 21 | } 22 | 23 | public static class Requests { 24 | private String cpu; 25 | private String memory; 26 | 27 | public String getCpu() { 28 | return cpu; 29 | } 30 | 31 | public void setCpu(String cpu) { 32 | this.cpu = cpu; 33 | } 34 | 35 | public String getMemory() { 36 | return memory; 37 | } 38 | 39 | public void setMemory(String memory) { 40 | this.memory = memory; 41 | } 42 | } 43 | 44 | public static class Limits extends Requests { 45 | 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/service/common/AbstractService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.service.common; 2 | 3 | import com.cloudcheflabs.dataroaster.trino.gateway.api.dao.common.Operations; 4 | 5 | import java.io.Serializable; 6 | import java.util.List; 7 | 8 | public abstract class AbstractService implements Operations { 9 | 10 | @Override 11 | public T findOne(final String id) { 12 | return getDao().findOne(id); 13 | } 14 | 15 | @Override 16 | public List findAll() { 17 | return getDao().findAll(); 18 | } 19 | 20 | @Override 21 | public void create(final T entity) { 22 | getDao().create(entity); 23 | } 24 | 25 | 
@Override 26 | public T update(final T entity) { 27 | return getDao().update(entity); 28 | } 29 | 30 | @Override 31 | public void delete(final T entity) { 32 | getDao().delete(entity); 33 | } 34 | 35 | @Override 36 | public void deleteById(final String entityId) { 37 | getDao().deleteById(entityId); 38 | } 39 | 40 | protected abstract Operations getDao(); 41 | 42 | } 43 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/chart/templates/rbac.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ .Values.serviceAccount }} 6 | labels: 7 | app.kubernetes.io/name: trino-gateway 8 | namespace: {{ .Release.Namespace }} 9 | 10 | --- 11 | apiVersion: rbac.authorization.k8s.io/v1 12 | kind: ClusterRole 13 | metadata: 14 | annotations: 15 | rbac.authorization.kubernetes.io/autoupdate: "true" 16 | labels: 17 | kubernetes.io/bootstrapping: rbac-defaults 18 | name: trino-gateway 19 | rules: 20 | - apiGroups: 21 | - '*' 22 | resources: 23 | - '*' 24 | verbs: 25 | - '*' 26 | - nonResourceURLs: 27 | - '*' 28 | verbs: 29 | - '*' 30 | 31 | 32 | --- 33 | apiVersion: rbac.authorization.k8s.io/v1 34 | kind: ClusterRoleBinding 35 | metadata: 36 | annotations: 37 | rbac.authorization.kubernetes.io/autoupdate: "true" 38 | labels: 39 | kubernetes.io/bootstrapping: rbac-defaults 40 | name: trino-gateway 41 | roleRef: 42 | apiGroup: rbac.authorization.k8s.io 43 | kind: ClusterRole 44 | name: trino-gateway 45 | subjects: 46 | - kind: ServiceAccount 47 | name: {{ .Values.serviceAccount }} 48 | namespace: {{ .Release.Namespace }} -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/domain/model/UserToken.java: -------------------------------------------------------------------------------- 
1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.domain.model; 2 | 3 | import javax.persistence.*; 4 | import java.io.Serializable; 5 | 6 | @Entity 7 | @Table(name = "user_token") 8 | public class UserToken implements Serializable { 9 | 10 | public static final int EXPIRATION_IN_HOUR = 10; 11 | 12 | @Id 13 | @Column(name = "token") 14 | private String token; 15 | 16 | @Column(name = "expiration") 17 | private long expiration; 18 | 19 | @ManyToOne 20 | @JoinColumn(name ="user") 21 | private Users users; 22 | 23 | public String getToken() { 24 | return token; 25 | } 26 | 27 | public void setToken(String token) { 28 | this.token = token; 29 | } 30 | 31 | public long getExpiration() { 32 | return expiration; 33 | } 34 | 35 | public void setExpiration(long expiration) { 36 | this.expiration = expiration; 37 | } 38 | 39 | public Users getUsers() { 40 | return users; 41 | } 42 | 43 | public void setUsers(Users users) { 44 | this.users = users; 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8' 4 | 5 | RUN yum install -y tzdata openssl curl ca-certificates fontconfig gzip tar \ 6 | && yum update -y; yum clean all 7 | 8 | 9 | # install jdk. 10 | 11 | RUN set -eux; \ 12 | yum install java-11-openjdk java-11-openjdk-devel -y; 13 | 14 | 15 | # install trino gateway. 16 | ENV TRINO_GATEWAY_HOME /opt/trino-gateway 17 | ENV TRINO_GATEWAY_USER trino 18 | 19 | RUN useradd -ms /bin/bash -d ${TRINO_GATEWAY_HOME} ${TRINO_GATEWAY_USER} 20 | 21 | 22 | # add trino gateway jar. 23 | ARG TRINO_GATEWAY_JAR 24 | ADD ${TRINO_GATEWAY_JAR} ${TRINO_GATEWAY_HOME} 25 | 26 | # add db schema sql. 27 | ARG DB_SCHEMA 28 | ADD ${DB_SCHEMA} ${TRINO_GATEWAY_HOME} 29 | 30 | # make conf directory. 
31 | RUN mkdir -p ${TRINO_GATEWAY_HOME}/conf 32 | 33 | # add trino run shell. 34 | ADD run-trino-gateway.sh ${TRINO_GATEWAY_HOME} 35 | ADD create-db-schema.sh ${TRINO_GATEWAY_HOME} 36 | 37 | # add permissions. 38 | RUN chmod +x ${TRINO_GATEWAY_HOME}/*.sh 39 | RUN chown ${TRINO_GATEWAY_USER}: -R ${TRINO_GATEWAY_HOME} 40 | 41 | # change work directory. 42 | USER ${TRINO_GATEWAY_USER} 43 | WORKDIR ${TRINO_GATEWAY_HOME} 44 | -------------------------------------------------------------------------------- /operators/helm/helm-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/helm/util/FileUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.helm.util; 2 | 3 | import org.joda.time.DateTime; 4 | import org.joda.time.format.DateTimeFormat; 5 | import org.joda.time.format.DateTimeFormatter; 6 | 7 | import java.util.UUID; 8 | 9 | public class FileUtils { 10 | 11 | public static String createHelmTempDirectory() { 12 | DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyyMMddHHmmss"); 13 | String formattedDate = fmt.print(DateTime.now()); 14 | String uid = formattedDate + "-" + UUID.randomUUID().toString(); 15 | 16 | // base temp directory. 17 | String sparkAppBaseTempDir = System.getProperty("java.io.tmpdir") + "/helm"; 18 | 19 | // temp directory. 20 | String tempDirectory = sparkAppBaseTempDir + "/" + uid; 21 | 22 | // create temp. directory. 
23 | com.cloudcheflabs.dataroaster.common.util.FileUtils.createDirectory(tempDirectory); 24 | 25 | return tempDirectory; 26 | } 27 | 28 | public static void deleteDirectory(String path) { 29 | com.cloudcheflabs.dataroaster.common.util.FileUtils.deleteDirectory(path); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /components/hive/spark-thrift-server/src/test/resources/data/test.json: -------------------------------------------------------------------------------- 1 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 2 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 3 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 4 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 5 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 6 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 7 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} 8 | {"itemId":"any-item-id0","quantity":2,"price":1000,"baseProperties":{"uid":"any-uid0","eventType":"cart-event","version":"7.0","ts":1527304486873}} -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/chart/templates/rbac.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: 
ServiceAccount 4 | metadata: 5 | name: {{ .Values.serviceAccount }} 6 | labels: 7 | app.kubernetes.io/name: trino-controller 8 | namespace: {{ .Values.namespace }} 9 | 10 | --- 11 | apiVersion: rbac.authorization.k8s.io/v1 12 | kind: ClusterRole 13 | metadata: 14 | annotations: 15 | rbac.authorization.kubernetes.io/autoupdate: "true" 16 | labels: 17 | kubernetes.io/bootstrapping: rbac-defaults 18 | name: trino-controller 19 | rules: 20 | - apiGroups: 21 | - '*' 22 | resources: 23 | - '*' 24 | verbs: 25 | - '*' 26 | - nonResourceURLs: 27 | - '*' 28 | verbs: 29 | - '*' 30 | 31 | 32 | --- 33 | apiVersion: rbac.authorization.k8s.io/v1 34 | kind: ClusterRoleBinding 35 | metadata: 36 | annotations: 37 | rbac.authorization.kubernetes.io/autoupdate: "true" 38 | labels: 39 | kubernetes.io/bootstrapping: rbac-defaults 40 | name: trino-controller 41 | roleRef: 42 | apiGroup: rbac.authorization.k8s.io 43 | kind: ClusterRole 44 | name: trino-controller 45 | subjects: 46 | - kind: ServiceAccount 47 | name: {{ .Values.serviceAccount }} 48 | namespace: {{ .Values.namespace }} -------------------------------------------------------------------------------- /operators/dataroaster/chart/templates/rbac.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ .Values.serviceAccount }} 6 | labels: 7 | app.kubernetes.io/name: dataroaster-operator 8 | namespace: {{ .Release.Namespace }} 9 | 10 | --- 11 | apiVersion: rbac.authorization.k8s.io/v1 12 | kind: ClusterRole 13 | metadata: 14 | annotations: 15 | rbac.authorization.kubernetes.io/autoupdate: "true" 16 | labels: 17 | kubernetes.io/bootstrapping: rbac-defaults 18 | name: dataroaster-operator 19 | rules: 20 | - apiGroups: 21 | - '*' 22 | resources: 23 | - '*' 24 | verbs: 25 | - '*' 26 | - nonResourceURLs: 27 | - '*' 28 | verbs: 29 | - '*' 30 | 31 | 32 | --- 33 | apiVersion: rbac.authorization.k8s.io/v1 34 | kind: 
ClusterRoleBinding 35 | metadata: 36 | annotations: 37 | rbac.authorization.kubernetes.io/autoupdate: "true" 38 | labels: 39 | kubernetes.io/bootstrapping: rbac-defaults 40 | name: dataroaster-operator 41 | roleRef: 42 | apiGroup: rbac.authorization.k8s.io 43 | kind: ClusterRole 44 | name: dataroaster-operator 45 | subjects: 46 | - kind: ServiceAccount 47 | name: {{ .Values.serviceAccount }} 48 | namespace: {{ .Release.Namespace }} -------------------------------------------------------------------------------- /operators/helm/helm-operator/docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM centos:7 2 | 3 | ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' LC_ALL='en_US.UTF-8' 4 | 5 | RUN yum install -y tzdata openssl curl ca-certificates fontconfig gzip tar \ 6 | && yum update -y; yum clean all 7 | 8 | 9 | # install jdk. 10 | 11 | RUN set -eux; \ 12 | yum install java-11-openjdk java-11-openjdk-devel -y; 13 | 14 | 15 | # install helm operator. 16 | ENV HELM_OPERATOR_HOME /opt/helm-operator 17 | ENV HELM_OPERATOR_USER helm 18 | 19 | RUN useradd -ms /bin/bash -d ${HELM_OPERATOR_HOME} ${HELM_OPERATOR_USER} 20 | 21 | 22 | # add helm operator jar. 23 | ARG HELM_OPERATOR_JAR 24 | ADD ${HELM_OPERATOR_JAR} ${HELM_OPERATOR_HOME} 25 | 26 | # add helm run shell. 27 | ADD run-helm-operator.sh ${HELM_OPERATOR_HOME} 28 | 29 | # add permissions. 30 | RUN chmod +x ${HELM_OPERATOR_HOME}/*.sh 31 | RUN chown ${HELM_OPERATOR_USER}: -R ${HELM_OPERATOR_HOME} 32 | 33 | # install helm. 34 | RUN set -eux; \ 35 | curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash; \ 36 | helm 37 | 38 | # set kubeconfig env. 39 | ENV KUBECONFIG ${HELM_OPERATOR_HOME}/.kube/config 40 | 41 | # change work directory. 
42 | USER ${HELM_OPERATOR_USER} 43 | WORKDIR ${HELM_OPERATOR_HOME} 44 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-gateway/src/main/java/com/cloudcheflabs/dataroaster/trino/gateway/util/TempFileUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.trino.gateway.util; 2 | 3 | import org.joda.time.DateTime; 4 | import org.joda.time.format.DateTimeFormat; 5 | import org.joda.time.format.DateTimeFormatter; 6 | 7 | import java.util.UUID; 8 | 9 | public class TempFileUtils { 10 | 11 | public static String createTempDirectory() { 12 | DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyyMMddHHmmss"); 13 | String formattedDate = fmt.print(DateTime.now()); 14 | String uid = formattedDate + "-" + UUID.randomUUID().toString(); 15 | 16 | // base temp directory. 17 | String sparkAppBaseTempDir = System.getProperty("java.io.tmpdir") + "/dataroaster"; 18 | 19 | // temp directory. 20 | String tempDirectory = sparkAppBaseTempDir + "/" + uid; 21 | 22 | // create temp. directory. 
23 | com.cloudcheflabs.dataroaster.common.util.FileUtils.createDirectory(tempDirectory); 24 | 25 | return tempDirectory; 26 | } 27 | 28 | public static void deleteDirectory(String path) { 29 | com.cloudcheflabs.dataroaster.common.util.FileUtils.deleteDirectory(path); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /operators/helm/chart/templates/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: helm-operator 5 | labels: 6 | app: helm-operator 7 | spec: 8 | replicas: 1 9 | selector: 10 | matchLabels: 11 | app: helm-operator 12 | template: 13 | metadata: 14 | labels: 15 | app: helm-operator 16 | annotations: 17 | {{- toYaml $.Values.tolerations | nindent 8 }} 18 | spec: 19 | affinity: 20 | {{- toYaml $.Values.affinity | nindent 8 }} 21 | tolerations: 22 | {{- toYaml $.Values.tolerations | nindent 8 }} 23 | nodeSelector: 24 | {{- toYaml $.Values.nodeSelector | nindent 8 }} 25 | {{- if .Values.priorityClassName }} 26 | priorityClassName: {{ .Values.priorityClassName }} 27 | {{- end }} 28 | hostNetwork: {{ .Values.hostNetwork }} 29 | serviceAccountName: {{ .Values.serviceAccount }} 30 | dnsPolicy: {{ .Values.dnsPolicy }} 31 | containers: 32 | - name: helm-operator 33 | image: {{ .Values.image }} 34 | imagePullPolicy: {{ .Values.imagePullPolicy }} 35 | resources: 36 | {{- toYaml $.Values.resources | nindent 12 }} 37 | command: 38 | - "./run-helm-operator.sh" -------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/service/common/AbstractService.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.service.common; 2 | 3 | 4 | import 
com.cloudcheflabs.dataroaster.operators.dataroaster.api.dao.common.Operations; 5 | 6 | import java.io.Serializable; 7 | import java.util.List; 8 | 9 | public abstract class AbstractService implements Operations { 10 | 11 | @Override 12 | public T findOne(final String id) { 13 | return getDao().findOne(id); 14 | } 15 | 16 | @Override 17 | public List findAll() { 18 | return getDao().findAll(); 19 | } 20 | 21 | @Override 22 | public void create(final T entity) { 23 | getDao().create(entity); 24 | } 25 | 26 | @Override 27 | public T update(final T entity) { 28 | return getDao().update(entity); 29 | } 30 | 31 | @Override 32 | public void delete(final T entity) { 33 | getDao().delete(entity); 34 | } 35 | 36 | @Override 37 | public void deleteById(final String entityId) { 38 | getDao().deleteById(entityId); 39 | } 40 | 41 | protected abstract Operations getDao(); 42 | 43 | } 44 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/test/resources/trino-pod-template/worker-pod-template.yaml: -------------------------------------------------------------------------------- 1 | affinity: 2 | nodeAffinity: 3 | requiredDuringSchedulingIgnoredDuringExecution: 4 | nodeSelectorTerms: 5 | - matchExpressions: 6 | - key: coordinator 7 | operator: In 8 | values: 9 | - "false" 10 | - key: worker 11 | operator: In 12 | values: 13 | - "true" 14 | - key: clusterName 15 | operator: In 16 | values: 17 | - "etl-1" 18 | - key: management 19 | operator: In 20 | values: 21 | - "false" 22 | topologyKey: topology.kubernetes.io/zone 23 | podAntiAffinity: 24 | preferredDuringSchedulingIgnoredDuringExecution: 25 | - weight: 100 26 | podAffinityTerm: 27 | labelSelector: 28 | matchExpressions: 29 | - key: component 30 | operator: In 31 | values: 32 | - worker 33 | topologyKey: topology.kubernetes.io/zone 34 | tolerations: 35 | - key: "clusterName" 36 | operator: "Equal" 37 | value: "etl-1" 38 | effect: "NoSchedule" 
-------------------------------------------------------------------------------- /operators/dataroaster/dataroaster-operator/src/main/java/com/cloudcheflabs/dataroaster/operators/dataroaster/util/TempFileUtils.java: -------------------------------------------------------------------------------- 1 | package com.cloudcheflabs.dataroaster.operators.dataroaster.util; 2 | 3 | import org.joda.time.DateTime; 4 | import org.joda.time.format.DateTimeFormat; 5 | import org.joda.time.format.DateTimeFormatter; 6 | 7 | import java.util.UUID; 8 | 9 | public class TempFileUtils { 10 | 11 | public static String createTempDirectory() { 12 | DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyyMMddHHmmss"); 13 | String formattedDate = fmt.print(DateTime.now()); 14 | String uid = formattedDate + "-" + UUID.randomUUID().toString(); 15 | 16 | // base temp directory. 17 | String sparkAppBaseTempDir = System.getProperty("java.io.tmpdir") + "/dataroaster"; 18 | 19 | // temp directory. 20 | String tempDirectory = sparkAppBaseTempDir + "/" + uid; 21 | 22 | // create temp. directory. 
23 | com.cloudcheflabs.dataroaster.common.util.FileUtils.createDirectory(tempDirectory); 24 | 25 | return tempDirectory; 26 | } 27 | 28 | public static void deleteDirectory(String path) { 29 | com.cloudcheflabs.dataroaster.common.util.FileUtils.deleteDirectory(path); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /trino-ecosystem/trino-controller/src/test/resources/trino-pod-template/coordinator-pod-template.yaml: -------------------------------------------------------------------------------- 1 | affinity: 2 | nodeAffinity: 3 | requiredDuringSchedulingIgnoredDuringExecution: 4 | nodeSelectorTerms: 5 | - matchExpressions: 6 | - key: coordinator 7 | operator: In 8 | values: 9 | - "true" 10 | - key: worker 11 | operator: In 12 | values: 13 | - "false" 14 | - key: clusterName 15 | operator: In 16 | values: 17 | - "etl-1" 18 | - key: management 19 | operator: In 20 | values: 21 | - "false" 22 | topologyKey: topology.kubernetes.io/zone 23 | podAntiAffinity: 24 | preferredDuringSchedulingIgnoredDuringExecution: 25 | - weight: 100 26 | podAffinityTerm: 27 | labelSelector: 28 | matchExpressions: 29 | - key: component 30 | operator: In 31 | values: 32 | - coordinator 33 | topologyKey: topology.kubernetes.io/zone 34 | tolerations: 35 | - key: "clusterName" 36 | operator: "Equal" 37 | value: "etl-1" 38 | effect: "NoSchedule" --------------------------------------------------------------------------------