├── src
│   ├── main
│   │   ├── resources
│   │   │   ├── templates
│   │   │   │   ├── common
│   │   │   │   │   ├── shell
│   │   │   │   │   │   ├── post-install.mustache
│   │   │   │   │   │   └── install.mustache
│   │   │   │   │   ├── kerberos
│   │   │   │   │   │   ├── kadm5.mustache
│   │   │   │   │   │   ├── kdc.mustache
│   │   │   │   │   │   └── krb5.mustache
│   │   │   │   │   ├── json
│   │   │   │   │   │   ├── create-repository.mustache
│   │   │   │   │   │   ├── create-cluster.mustache
│   │   │   │   │   │   └── blueprint.mustache
│   │   │   │   │   └── network
│   │   │   │   │       └── hosts.mustache
│   │   │   │   ├── docker
│   │   │   │   │   └── dockerfile.mustache
│   │   │   │   └── vagrant
│   │   │   │       └── vagrantfile.mustache
│   │   │   └── application.properties
│   │   └── java
│   │       └── veil
│   │           └── hdp
│   │               └── vagrant
│   │                   └── generator
│   │                       ├── model
│   │                       │   ├── Component.java
│   │                       │   └── Arguments.java
│   │                       ├── service
│   │                       │   ├── FileService.java
│   │                       │   ├── DockerService.java
│   │                       │   ├── VagrantService.java
│   │                       │   └── AbstractFileService.java
│   │                       ├── GeneratorApplication.java
│   │                       ├── BuildDockerFile.java
│   │                       ├── BuildVagrantFile.java
│   │                       └── Constants.java
│   └── test
│       ├── resources
│       │   ├── application.properties
│       │   ├── application-hive.properties
│       │   ├── application-hbase.properties
│       │   ├── application-jdbc-http.properties
│       │   ├── application-hive_large.properties
│       │   ├── application-local_repo.properties
│       │   ├── application-hive_secure.properties
│       │   ├── application-spark_large.properties
│       │   ├── application-hbase_secure.properties
│       │   ├── application-spark_secure.properties
│       │   ├── application-jdbc-binary-kerberos.properties
│       │   ├── application-spark_secure_large.properties
│       │   └── application-local.properties
│       └── java
│           └── veil
│               └── hdp
│                   └── vagrant
│                       └── generator
│                           └── service
│                               ├── DockerServiceTest.java
│                               └── VagrantServiceTest.java
├── .gitignore
├── pom.xml
└── README.md
/src/main/resources/templates/common/shell/post-install.mustache: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | -------------------------------------------------------------------------------- /src/main/resources/templates/common/kerberos/kadm5.mustache: -------------------------------------------------------------------------------- 1 | */admin@{{realmUpper}} * -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/model/Component.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.model; 2 | 3 | public enum Component { 4 | hive, spark, hbase 5 | } 6 | -------------------------------------------------------------------------------- /src/main/resources/templates/common/json/create-repository.mustache: -------------------------------------------------------------------------------- 1 | { 2 | "Repositories": { 3 | "base_url": "{{url}}", 4 | "verify_base_url": false 5 | } 6 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | .vagrant 3 | *.iml 4 | .idea/ 5 | /out 6 | /target 7 | 8 | /src/test/resources/application-hdpc.properties 9 | /src/test/resources/application-hdpck.properties 10 | -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/service/FileService.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.service; 2 | 3 | import veil.hdp.vagrant.generator.model.Arguments; 4 | 5 | public interface FileService { 6 | void buildFile(Arguments arguments); 7 | } 8 |
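For orientation: `FileService` is the single entry point that both generators implement by extending `AbstractFileService`, which builds the shared template model and exposes helpers for rendering and writing files. A minimal sketch of how a hypothetical additional output format could plug in — the class name and template path below are illustrative, not part of this repository:

```java
package veil.hdp.vagrant.generator.service;

import org.springframework.stereotype.Service;
import veil.hdp.vagrant.generator.model.Arguments;

import java.util.Map;

// Hypothetical example only: a third generator alongside VagrantService and DockerService.
@Service
public class AnsibleService extends AbstractFileService {

    @Override
    protected void buildFile(Map<String, Object> model, Arguments arguments) {
        // Reuse the shared model (fqdn, kerberos flags, logger lambda, ...) built by
        // AbstractFileService#buildModel, render a template, and write the result out.
        String playbook = convertTemplateToString(resolver, "templates/ansible/playbook.mustache", model);
        writeContentsToFile("out/" + arguments.getFqdn(), "playbook.yml", playbook);
    }
}
```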
-------------------------------------------------------------------------------- /src/main/resources/templates/common/network/hosts.mustache: -------------------------------------------------------------------------------- 1 | 127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4 2 | ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 3 | 4 | {{arguments.ip}} {{arguments.fqdn}} {{arguments.hostname}} 5 | 6 | {{#arguments.customRepoEnabled}} 7 | {{arguments.customRepoIp}} {{arguments.customRepoFqdn}} 8 | {{/arguments.customRepoEnabled}} -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/GeneratorApplication.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | 6 | @SpringBootApplication 7 | public class GeneratorApplication { 8 | 9 | public static void main(String[] args) { 10 | 11 | SpringApplication app = new SpringApplication(GeneratorApplication.class); 12 | 13 | app.setWebEnvironment(false); 14 | 15 | app.run(args); 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- /src/main/resources/templates/common/kerberos/kdc.mustache: -------------------------------------------------------------------------------- 1 | [kdcdefaults] 2 | kdc_ports = 88 3 | kdc_tcp_ports = 88 4 | 5 | [realms] 6 | {{realmUpper}} = { 7 | #master_key_type = aes256-cts 8 | acl_file = /var/kerberos/krb5kdc/kadm5.acl 9 | dict_file = /usr/share/dict/words 10 | admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab 11 | supported_enctypes = aes256-cts:normal aes128-cts:normal des3-hmac-sha1:normal arcfour-hmac:normal camellia256-cts:normal camellia128-cts:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal 12 | } -------------------------------------------------------------------------------- /src/test/resources/application.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=test.hdp.local 6 | vm.hostname=test 7 | vm.ip=192.168.7.201 8 | vm.memory=4096 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-hive.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=hive.hdp.local 6 | vm.hostname=hive 7 | vm.ip=192.168.7.201 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= 
-------------------------------------------------------------------------------- /src/test/resources/application-hbase.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=hbase.hdp.local 6 | vm.hostname=hbase 7 | vm.ip=192.168.7.202 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hbase 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-jdbc-http.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=jdbc-http.hdp.local 6 | vm.hostname=jdbc-http 7 | vm.ip=192.168.7.201 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-hive_large.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=hive-large.hdp.local 6 | vm.hostname=hive-large 7 | vm.ip=192.168.7.201 8 | vm.memory=32768 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-local_repo.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=local-repo.hdp.local 6 | vm.hostname=local-repo 7 | vm.ip=192.168.7.201 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-hive_secure.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=hive-secure.hdp.local 6 | 
vm.hostname=hive-secure 7 | vm.ip=192.168.7.201 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=true 16 | hdp.kerberos.realm=HDP.LOCAL 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-spark_large.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=spark-large.hdp.local 6 | vm.hostname=spark-large 7 | vm.ip=192.168.7.201 8 | vm.memory=16384 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive,spark 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-hbase_secure.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=hbase-secure.hdp.local 6 | vm.hostname=hbase-secure 7 | vm.ip=192.168.7.203 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=true 16 | hdp.kerberos.realm=HDP.LOCAL 17 | hdp.components=hbase 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-spark_secure.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=spark-secure.hdp.local 6 | vm.hostname=spark-secure 7 | vm.ip=192.168.7.201 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=true 16 | hdp.kerberos.realm=HDP.LOCAL 17 | hdp.components=hive,spark 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/main/resources/templates/common/kerberos/krb5.mustache: -------------------------------------------------------------------------------- 1 | [logging] 2 | default = FILE:/var/log/krb5libs.log 3 | kdc = FILE:/var/log/krb5kdc.log 4 | admin_server = FILE:/var/log/kadmind.log 5 | 6 | [libdefaults] 7 | default_realm = {{realmUpper}} 8 | dns_lookup_realm = false 9 | dns_lookup_kdc = false 10 | ticket_lifetime = 24h 11 | renew_lifetime = 7d 12 | forwardable = true 13 | 14 | [realms] 15 | {{realmUpper}} = { 16 | kdc = {{arguments.fqdn}} 17 | admin_server = {{arguments.fqdn}} 18 | } 19 | 
20 | [domain_realm] 21 | .{{realmLower}} = {{realmUpper}} 22 | {{realmLower}} = {{realmUpper}} -------------------------------------------------------------------------------- /src/test/resources/application-jdbc-binary-kerberos.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=jdbc-binary-kerberos.hdp.local 6 | vm.hostname=jdbc-binary-kerberos 7 | vm.ip=192.168.7.202 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=true 16 | hdp.kerberos.realm=HDP.LOCAL 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-spark_secure_large.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=spark-secure-large.hdp.local 6 | vm.hostname=spark-secure-large 7 | vm.ip=192.168.7.201 8 | vm.memory=16384 9 | vm.cores=6 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.6 14 | hdp.ambari.version=2.6.0.0 15 | hdp.kerberos.enabled=true 16 | hdp.kerberos.realm=HDP.LOCAL 17 | hdp.components=hive,spark 18 | 19 | custom.repo.enabled=false 20 | custom.repo.fqdn= 21 | custom.repo.ip= 22 | custom.repo.ambari.url= 23 | custom.repo.hdp.url= 24 | custom.repo.hdp-utils.url= -------------------------------------------------------------------------------- /src/test/resources/application-local.properties: -------------------------------------------------------------------------------- 1 | logging.level.root=INFO 2 | logging.level.veil=DEBUG 3 | logging.file=hdp-vagrant-generator-test.log 4 | 5 | vm.fqdn=hive.hdp.local 6 | vm.hostname=hive 7 | vm.ip=192.168.7.201 8 | vm.memory=8192 9 | vm.cores=4 10 | vm.disks=1 11 | vm.update.yum=false 12 | 13 | hdp.stack.version=2.5 14 | hdp.ambari.version=2.4.2.0 15 | hdp.kerberos.enabled=false 16 | hdp.kerberos.realm= 17 | hdp.components=hive 18 | 19 | custom.repo.enabled=true 20 | custom.repo.fqdn=repo.hdp.local 21 | custom.repo.ip=192.168.7.101 22 | custom.repo.ambari.url=http://repo.hdp.local/repos/centos7/ambari/2.4.2.0/ambari.repo 23 | custom.repo.hdp.url=http://repo.hdp.local/hdp/centos7/HDP-2.5.3.0 24 | custom.repo.hdp-utils.url=http://repo.hdp.local/hdp/centos7/HDP-UTILS-1.1.0.21 -------------------------------------------------------------------------------- /src/main/resources/templates/common/json/create-cluster.mustache: -------------------------------------------------------------------------------- 1 | { 2 | "blueprint" : "{{blueprintName}}", 3 | "default_password" : "password", 4 | {{#arguments.kerberosEnabled}} 5 | "credentials" : [ 6 | { 7 | "alias" : "kdc.admin.credential", 8 | "principal" : "{{kdcAdmin}}", 9 | "key" : "{{kdcPassword}}", 10 | "type" : "TEMPORARY" 11 | } 12 | ], 13 | "security" : { 14 | "type" : "KERBEROS" 15 | }, 16 | {{/arguments.kerberosEnabled}} 17 | "host_groups" :[ 18 | { 19 | "name" : "host_group_1", 20 | "hosts" : [ 21 | { 22 | "fqdn" : "{{arguments.fqdn}}" 23 | } 24 | ] 25 | } 26 | ] 27 | } 28 | 
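For reference, with Kerberos disabled and the default `vm.fqdn` from `application.properties`, the create-cluster template above renders to roughly the following JSON (the blueprint name `generated` and the fqdn shown here come from the defaults in `AbstractFileService` and `application.properties`):

```json
{
  "blueprint" : "generated",
  "default_password" : "password",
  "host_groups" :[
    {
      "name" : "host_group_1",
      "hosts" : [
        {
          "fqdn" : "default.hdp.local"
        }
      ]
    }
  ]
}
```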
-------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/BuildDockerFile.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.boot.CommandLineRunner; 5 | import org.springframework.context.annotation.Profile; 6 | import org.springframework.core.env.Environment; 7 | import org.springframework.stereotype.Component; 8 | import veil.hdp.vagrant.generator.model.Arguments; 9 | import veil.hdp.vagrant.generator.service.DockerService; 10 | 11 | @Component 12 | @Profile("!test") 13 | public class BuildDockerFile implements CommandLineRunner { 14 | 15 | @Autowired 16 | private Environment environment; 17 | 18 | @Autowired 19 | private DockerService dockerService; 20 | 21 | @Override 22 | public void run(String... args) throws Exception { 23 | Arguments arguments = new Arguments(environment); 24 | dockerService.buildFile(arguments); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/BuildVagrantFile.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator; 2 | 3 | import org.springframework.context.annotation.Profile; 4 | import veil.hdp.vagrant.generator.model.Arguments; 5 | import veil.hdp.vagrant.generator.service.VagrantService; 6 | import org.springframework.beans.factory.annotation.Autowired; 7 | import org.springframework.boot.CommandLineRunner; 8 | import org.springframework.core.env.Environment; 9 | import org.springframework.stereotype.Component; 10 | 11 | @Component 12 | @Profile("!test") 13 | public class BuildVagrantFile implements CommandLineRunner { 14 | 15 | @Autowired 16 | private Environment environment; 17 | 18 | @Autowired 19 | private VagrantService vagrantService; 20 | 21 | @Override 22 | public void run(String... 
args) throws Exception { 23 | Arguments arguments = new Arguments(environment); 24 | vagrantService.buildFile(arguments); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/main/resources/templates/docker/dockerfile.mustache: -------------------------------------------------------------------------------- 1 | FROM hortonworks/ambari-base 2 | 3 | RUN yum install wget curl ntp -y && yum clean all 4 | 5 | RUN yum history new{{#arguments.updateLibraries}} && yum update -y{{/arguments.updateLibraries}} && yum clean all 6 | 7 | RUN wget -nv {{arguments.ambariRepoFileUrl}} -O /etc/yum.repos.d/ambari.repo 8 | 9 | RUN yum install ambari-server ambari-agent{{#arguments.kerberosEnabled}} krb5-libs krb5-server krb5-workstation{{/arguments.kerberosEnabled}} -y && yum clean all 10 | 11 | RUN sed -i "s/^hostname=localhost/hostname={{arguments.fqdn}}/g" /etc/ambari-agent/conf/ambari-agent.ini 12 | 13 | {{#arguments.kerberosEnabled}} 14 | ADD krb5.conf /etc/krb5.conf 15 | ADD kdc.conf /var/kerberos/krb5kdc/kdc.conf 16 | ADD kadm5.acl /var/kerberos/krb5kdc/kadm5.acl 17 | 18 | RUN /usr/sbin/kdb5_util create -s -P password 19 | RUN /usr/sbin/kadmin.local -q 'addprinc -pw admin admin/admin' -w password 20 | RUN krb5kdc start 21 | #RUN kadmin start 22 | {{/arguments.kerberosEnabled}} 23 | 24 | 25 | RUN ambari-server setup -j /usr/jdk64/jdk1.7.0_67 26 | 27 | RUN ambari-server start && ambari-agent start -------------------------------------------------------------------------------- /src/test/java/veil/hdp/vagrant/generator/service/DockerServiceTest.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.service; 2 | 3 | import org.junit.After; 4 | import org.junit.Before; 5 | import org.junit.Test; 6 | import org.junit.runner.RunWith; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.boot.test.context.SpringBootTest; 9 | import org.springframework.core.env.Environment; 10 | import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 11 | import veil.hdp.vagrant.generator.GeneratorApplication; 12 | import veil.hdp.vagrant.generator.model.Arguments; 13 | 14 | @RunWith(SpringJUnit4ClassRunner.class) 15 | @SpringBootTest(classes = GeneratorApplication.class) 16 | public class DockerServiceTest { 17 | 18 | @Autowired 19 | private Environment environment; 20 | 21 | @Autowired 22 | private DockerService dockerService; 23 | 24 | private Arguments arguments; 25 | 26 | @Before 27 | public void setUp() throws Exception { 28 | arguments = new Arguments(environment); 29 | } 30 | 31 | @After 32 | public void tearDown() throws Exception { 33 | arguments = null; 34 | } 35 | 36 | @Test 37 | public void buildFile() throws Exception { 38 | dockerService.buildFile(arguments); 39 | } 40 | 41 | } -------------------------------------------------------------------------------- /src/test/java/veil/hdp/vagrant/generator/service/VagrantServiceTest.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.service; 2 | 3 | import org.junit.After; 4 | import org.junit.Before; 5 | import org.junit.Test; 6 | import org.junit.runner.RunWith; 7 | import org.springframework.beans.factory.annotation.Autowired; 8 | import org.springframework.boot.test.context.SpringBootTest; 9 | import org.springframework.core.env.Environment; 10 | import 
org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 11 | import veil.hdp.vagrant.generator.GeneratorApplication; 12 | import veil.hdp.vagrant.generator.model.Arguments; 13 | 14 | @RunWith(SpringJUnit4ClassRunner.class) 15 | @SpringBootTest(classes = GeneratorApplication.class) 16 | public class VagrantServiceTest { 17 | 18 | @Autowired 19 | private Environment environment; 20 | 21 | @Autowired 22 | private VagrantService vagrantService; 23 | 24 | private Arguments arguments; 25 | 26 | @Before 27 | public void setUp() throws Exception { 28 | arguments = new Arguments(environment); 29 | } 30 | 31 | @After 32 | public void tearDown() throws Exception { 33 | arguments = null; 34 | } 35 | 36 | @Test 37 | public void buildFile() throws Exception { 38 | vagrantService.buildFile(arguments); 39 | } 40 | 41 | } -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/service/DockerService.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.service; 2 | 3 | import org.apache.commons.io.FileUtils; 4 | import org.springframework.stereotype.Service; 5 | import veil.hdp.vagrant.generator.model.Arguments; 6 | 7 | import java.io.File; 8 | import java.io.IOException; 9 | import java.nio.charset.StandardCharsets; 10 | import java.util.Map; 11 | 12 | @Service 13 | public class DockerService extends AbstractFileService { 14 | private static final String DOCKERFILE = "Dockerfile"; 15 | 16 | @Override 17 | protected void buildFile(Map<String, Object> model, Arguments arguments) { 18 | 19 | String dockerFile = convertTemplateToString(resolver, "templates/docker/dockerfile.mustache", model); 20 | String krb5 = convertTemplateToString(resolver, "templates/common/kerberos/krb5.mustache", model); 21 | String kdc = convertTemplateToString(resolver, "templates/common/kerberos/kdc.mustache", model); 22 | String kadm5 = convertTemplateToString(resolver, "templates/common/kerberos/kadm5.mustache", model); 23 | 24 | 25 | final String parentDirectoryName = "out/" + arguments.getFqdn(); 26 | 27 | try { 28 | FileUtils.writeStringToFile(new File(parentDirectoryName, DOCKERFILE), dockerFile, StandardCharsets.UTF_8); 29 | FileUtils.writeStringToFile(new File(parentDirectoryName, "krb5.conf"), krb5, StandardCharsets.UTF_8); 30 | FileUtils.writeStringToFile(new File(parentDirectoryName, "kdc.conf"), kdc, StandardCharsets.UTF_8); 31 | FileUtils.writeStringToFile(new File(parentDirectoryName, "kadm5.acl"), kadm5, StandardCharsets.UTF_8); 32 | } catch (IOException e) { 33 | log.error(e.getMessage(), e); 34 | } 35 | 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/resources/application.properties: -------------------------------------------------------------------------------- 1 | # ####################################### Logging Configurations 2 | 3 | logging.level.root=WARN 4 | logging.level.veil=INFO 5 | 6 | logging.file=hdp-vagrant-generator.log 7 | 8 | # ####################################### Virtual Box Configurations 9 | 10 | # fully qualified domain name (FQDN) of virtual box image 11 | vm.fqdn=default.hdp.local 12 | 13 | # dns hostname of virtual box image 14 | vm.hostname=default 15 | 16 | # ip address of virtual box image 17 | vm.ip=192.168.7.101 18 | 19 | # ram allocated to virtual box image in MB 20 | vm.memory=8192 21 | 22 | # number of cores allocated to virtual box image 23 | vm.cores=4 24 | 25 | # number of disks allocated to
virtual box image 26 | vm.disks=1 27 | 28 | # do you want yum to run update command during vagrant provisioning 29 | vm.update.yum=true 30 | 31 | 32 | # ####################################### HDP Configurations 33 | 34 | # HDP stack version 35 | hdp.stack.version=2.6 36 | 37 | # HDP Ambari version 38 | hdp.ambari.version=2.6.0.0 39 | 40 | # Kerberos enabled (defaults to false)? 41 | hdp.kerberos.enabled=false 42 | 43 | # if Kerberos is enabled, the realm (ex. "example.com") 44 | hdp.kerberos.realm= 45 | 46 | # HDP optional components installed 47 | hdp.components=hive,spark 48 | 49 | 50 | # ####################################### Custom Ambari Repository Configurations 51 | 52 | # is custom repo enabled (defaults to false)? 53 | custom.repo.enabled= 54 | 55 | # the fqdn of the custom repo (used in VM hosts file) 56 | custom.repo.fqdn= 57 | 58 | # the ip of the custom repo (used in VM hosts file) 59 | custom.repo.ip= 60 | 61 | # the url of the ambari.repo file in the custom repository (ex. "http://repo.hdp.local/repos/centos7/ambari/2.4.2.0/ambari.repo") 62 | custom.repo.ambari.url= 63 | 64 | # Custom Base URL for HDP Repo (ex. "http://repo.hdp.local/hdp/centos7/HDP-2.5.3.0") 65 | custom.repo.hdp.url= 66 | 67 | # Custom Base URL for HDP Utils Repo (ex. "http://repo.hdp.local/hdp/centos7/HDP-UTILS-1.1.0.21") 68 | custom.repo.hdp-utils.url= 69 | -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/Constants.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator; 2 | 3 | public class Constants { 4 | public static final String VM_FQDN = "vm.fqdn"; 5 | public static final String VM_HOSTNAME = "vm.hostname"; 6 | public static final String VM_IP = "vm.ip"; 7 | public static final String VM_MEMORY = "vm.memory"; 8 | public static final String VM_CORES = "vm.cores"; 9 | public static final String VM_UPDATE_YUM = "vm.update.yum"; 10 | public static final String VM_DISKS = "vm.disks"; 11 | public static final String HDP_AMBARI_API_BLUEPRINTS_URL = "hdp.ambari.api.blueprints.url"; 12 | public static final String HDP_AMBARI_API_CLUSTERS_URL = "hdp.ambari.api.clusters.url"; 13 | public static final String HDP_AMBARI_API_CLUSTERS_REQUESTS_URL = "hdp.ambari.api.clusters.requests.url"; 14 | 15 | public static final String HDP_REPO_AMBARI_FILE = "hdp.repo.ambari.file"; 16 | public static final String HDP_COMPONENTS = "hdp.components"; 17 | public static final String HDP_STACK_VERSION = "hdp.stack.version"; 18 | 19 | 20 | public static final String HDP_REPO_FQDN = "hdp.repo.fqdn"; 21 | public static final String HDP_REPO_IP = "hdp.repo.ip"; 22 | public static final String HDP_REPO_BASE = "hdp.repo.base"; 23 | public static final String HDP_REPO_UTILS_BASE = "hdp.repo.utils.base"; 24 | public static final String HDP_AMBARI_API_REPOSITORIES_HDP_URL = "hdp.ambari.api.repositories.hdp.url"; 25 | public static final String HDP_AMBARI_API_REPOSITORIES_HDPUTILS_URL = "hdp.ambari.api.repositories.hdputils.url"; 26 | 27 | 28 | public static final String FORMAT_SPACER = "*** %-40s %s\n"; 29 | public static final String FORMAT_NEW_LINE = "%s\n"; 30 | public static final String HDP_KERBEROS_ENABLED = "hdp.kerberos.enabled"; 31 | public static final String HDP_KERBEROS_REALM = "hdp.kerberos.realm"; 32 | public static final String HDP_AMBARI_VERSION = "hdp.ambari.version"; 33 | 34 | public static final String KDC_ADMIN = "admin/admin"; 35 | public static final String KDC_PASSWORD = 
"password"; 36 | } 37 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 4 | 4.0.0 5 | 6 | veil.hdp.vagrant 7 | hdp-vagrant-generator 8 | 1.0.0 9 | jar 10 | 11 | HDP Vagrant Generator 12 | HDP Vagrant Generator 13 | 14 | 15 | org.springframework.boot 16 | spring-boot-starter-parent 17 | 1.5.3.RELEASE 18 | 19 | 20 | 21 | 22 | UTF-8 23 | 1.8 24 | 1.8 25 | 1.8 26 | 27 | 28 | 29 | 30 | 31 | org.springframework.boot 32 | spring-boot-starter 33 | 34 | 35 | 36 | org.springframework.boot 37 | spring-boot-starter-mustache 38 | 39 | 40 | 41 | org.springframework.boot 42 | spring-boot-starter-test 43 | test 44 | 45 | 46 | 47 | com.google.guava 48 | guava 49 | 20.0 50 | 51 | 52 | 53 | commons-io 54 | commons-io 55 | 2.5 56 | 57 | 58 | 59 | org.apache.commons 60 | commons-lang3 61 | 3.5 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | org.springframework.boot 70 | spring-boot-maven-plugin 71 | 72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /src/main/resources/templates/vagrant/vagrantfile.mustache: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # generated by https://github.com/timveil/hdp-vagrant-generator on {{generatedDate}} 5 | 6 | require 'ffi' 7 | 8 | Vagrant.configure("2") do |config| 9 | config.vm.box = "timveil/centos7-hdp-base" 10 | config.vm.box_check_update = true 11 | 12 | # config.vm.hostname is broken in vagrant 1.9.0; works in 1.9.1; broken in 1.9.2 (windows); broken 1.9.3 (mac); broken in 1.9.6 (windows); works 1.9.7 (mac); works on windows 1.9.7 with updated base box 13 | # configure VM 14 | config.vm.hostname = '{{arguments.fqdn}}' 15 | config.vm.network "private_network", ip: '{{arguments.ip}}' 16 | config.vm.provider "virtualbox" do |v| 17 | v.memory = {{arguments.memory}} 18 | v.cpus = {{arguments.cores}} 19 | v.name = '{{arguments.fqdn}}' 20 | end 21 | 22 | # install on windows or mac: vagrant plugin install vagrant-hostsupdater 23 | # configure host updater plugin - https://github.com/cogitatio/vagrant-hostsupdater 24 | config.hostsupdater.aliases = ["{{arguments.hostname}}"] 25 | 26 | # install on windows or mac: vagrant plugin install vagrant-vbguest 27 | # configure VirtualBox Guest Additions plugin - https://github.com/dotless-de/vagrant-vbguest 28 | # for windows see https://github.com/cogitatio/vagrant-hostsupdater/issues/89. you'll likely need to change the permissions of the host file. 
29 | config.vbguest.auto_update = true 30 | config.vbguest.no_remote = true 31 | config.vbguest.no_install = false 32 | 33 | # workaround for https://github.com/mitchellh/vagrant/issues/8096; impacts 1.9.1 34 | config.vm.provision "shell", inline: "service network restart", run: "always" 35 | 36 | # provision VM 37 | config.vm.provision "file", source: "hosts", destination: "/tmp/install/hosts" 38 | config.vm.provision "file", source: "blueprint.json", destination: "/tmp/install/blueprint.json" 39 | config.vm.provision "file", source: "create-cluster.json", destination: "/tmp/install/create-cluster.json" 40 | {{#arguments.customRepoEnabled}} 41 | config.vm.provision "file", source: "create-hdp-repo.json", destination: "/tmp/install/create-hdp-repo.json" 42 | config.vm.provision "file", source: "create-hdp-utils-repo.json", destination: "/tmp/install/create-hdp-utils-repo.json" 43 | {{/arguments.customRepoEnabled}} 44 | {{#arguments.kerberosEnabled}} 45 | config.vm.provision "file", source: "kadm5.acl", destination: "/tmp/install/kadm5.acl" 46 | config.vm.provision "file", source: "kdc.conf", destination: "/tmp/install/kdc.conf" 47 | config.vm.provision "file", source: "krb5.conf", destination: "/tmp/install/krb5.conf" 48 | {{/arguments.kerberosEnabled}} 49 | config.vm.provision "Install", type: "shell", path: "install.sh" 50 | config.vm.provision "Post Install", type: "shell", path: "post-install.sh" 51 | config.vm.provision "Ambari Details", type: "shell", inline: "echo 'Ambari URL is http://{{arguments.fqdn}}:8080 or http://{{arguments.hostname}}:8080 or http://{{arguments.ip}}:8080'", run: "always" 52 | end 53 | -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/service/VagrantService.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.service; 2 | 3 | import com.google.common.collect.ImmutableMap; 4 | import org.springframework.stereotype.Service; 5 | import veil.hdp.vagrant.generator.model.Arguments; 6 | 7 | import java.util.Map; 8 | 9 | @Service 10 | public class VagrantService extends AbstractFileService { 11 | 12 | private static final String VAGRANTFILE = "Vagrantfile"; 13 | private static final String INSTALL_SH = "install.sh"; 14 | private static final String POST_INSTALL_SH = "post-install.sh"; 15 | private static final String HOSTS = "hosts"; 16 | private static final String BLUEPRINT_JSON = "blueprint.json"; 17 | private static final String CREATE_CLUSTER_JSON = "create-cluster.json"; 18 | private static final String CREATE_HDP_REPO_JSON = "create-hdp-repo.json"; 19 | private static final String CREATE_HDP_UTILS_REPO_JSON = "create-hdp-utils-repo.json"; 20 | private static final String KRB5_CONF = "krb5.conf"; 21 | private static final String KDC_CONF = "kdc.conf"; 22 | private static final String KADM5_ACL = "kadm5.acl"; 23 | 24 | @Override 25 | protected void buildFile(Map<String, Object> model, Arguments arguments) { 26 | 27 | final String parentDirectoryName = "target/tests/" + arguments.getFqdn(); 28 | 29 | writeContentsToFile(parentDirectoryName, INSTALL_SH, convertTemplateToString(resolver, "templates/common/shell/install.mustache", model)); 30 | writeContentsToFile(parentDirectoryName, POST_INSTALL_SH, convertTemplateToString(resolver, "templates/common/shell/post-install.mustache", model)); 31 | writeContentsToFile(parentDirectoryName, HOSTS, convertTemplateToString(resolver, "templates/common/network/hosts.mustache", model)); 32 |
writeContentsToFile(parentDirectoryName, BLUEPRINT_JSON, convertTemplateToString(resolver, "templates/common/json/blueprint.mustache", model)); 33 | writeContentsToFile(parentDirectoryName, CREATE_CLUSTER_JSON, convertTemplateToString(resolver, "templates/common/json/create-cluster.mustache", model)); 34 | 35 | if (arguments.isCustomRepoEnabled()) { 36 | writeContentsToFile(parentDirectoryName, CREATE_HDP_REPO_JSON, convertTemplateToString(resolver, "templates/common/json/create-repository.mustache", ImmutableMap.of("url", arguments.getCustomRepoHdpUrl()))); 37 | writeContentsToFile(parentDirectoryName, CREATE_HDP_UTILS_REPO_JSON, convertTemplateToString(resolver, "templates/common/json/create-repository.mustache", ImmutableMap.of("url", arguments.getCustomRepoHdpUtilsUrl()))); 38 | } 39 | 40 | if (arguments.isKerberosEnabled()) { 41 | writeContentsToFile(parentDirectoryName, KRB5_CONF, convertTemplateToString(resolver, "templates/common/kerberos/krb5.mustache", model)); 42 | writeContentsToFile(parentDirectoryName, KDC_CONF, convertTemplateToString(resolver, "templates/common/kerberos/kdc.mustache", model)); 43 | writeContentsToFile(parentDirectoryName, KADM5_ACL, convertTemplateToString(resolver, "templates/common/kerberos/kadm5.mustache", model)); 44 | } 45 | 46 | writeContentsToFile(parentDirectoryName, VAGRANTFILE, convertTemplateToString(resolver, "templates/vagrant/vagrantfile.mustache", model)); 47 | 48 | 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /src/main/resources/templates/common/shell/install.mustache: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | systemctl start ntpd 4 | 5 | {{#logger}}updating hosts file{{/logger}} 6 | sudo mv /tmp/install/hosts /etc/hosts 7 | sudo chown root:root /etc/hosts 8 | 9 | {{#logger}}updating hostname{{/logger}} 10 | hostnamectl --static set-hostname {{arguments.hostname}} 11 | 12 | {{#logger}}get ambari repo{{/logger}} 13 | wget -nv {{ambariRepoFileUrl}} -O /etc/yum.repos.d/ambari.repo 14 | 15 | {{#arguments.updateYum}} 16 | {{#logger}}running yum update{{/logger}} 17 | yum update -y && yum clean all 18 | {{/arguments.updateYum}} 19 | 20 | {{#logger}}running yum install{{/logger}} 21 | yum install{{#arguments.kerberosEnabled}} rng-tools krb5-libs krb5-server krb5-workstation{{/arguments.kerberosEnabled}} ambari-server ambari-agent -y && yum clean all 22 | 23 | {{#logger}}updating ambari-agent.ini{{/logger}} 24 | sed -i "s/^hostname=localhost/hostname={{arguments.fqdn}}/g" /etc/ambari-agent/conf/ambari-agent.ini 25 | 26 | {{#arguments.kerberosEnabled}} 27 | {{#logger}}starting rngd to increase entropy for kdb5_util create{{/logger}} 28 | systemctl start rngd 29 | systemctl enable rngd 30 | 31 | {{#logger}}updating kerberos config{{/logger}} 32 | sudo mv /tmp/install/kadm5.acl /var/kerberos/krb5kdc/kadm5.acl 33 | sudo chown root:root /var/kerberos/krb5kdc/kadm5.acl 34 | 35 | sudo mv /tmp/install/kdc.conf /var/kerberos/krb5kdc/kdc.conf 36 | sudo chown root:root /var/kerberos/krb5kdc/kdc.conf 37 | 38 | sudo mv /tmp/install/krb5.conf /etc/krb5.conf 39 | sudo chown root:root /etc/krb5.conf 40 | 41 | {{#logger}}creating kerberos database{{/logger}} 42 | kdb5_util create -s -P {{kdcPassword}} 43 | 44 | {{#logger}}adding kerberos principal{{/logger}} 45 | kadmin.local -q 'addprinc -pw {{kdcPassword}} {{kdcAdmin}}' -w {{kdcPassword}} 46 | 47 | {{#logger}}starting kerberos services{{/logger}} 48 | systemctl start krb5kdc 49 | systemctl start 
kadmin 50 | systemctl enable krb5kdc 51 | systemctl enable kadmin 52 | {{/arguments.kerberosEnabled}} 53 | 54 | {{#logger}}running ambari setup{{/logger}} 55 | ambari-server setup -s 56 | 57 | {{#logger}}starting ambari server and agent{{/logger}} 58 | ambari-server start 59 | ambari-agent start 60 | 61 | {{#logger}}sleep before calling ambari REST apis{{/logger}} 62 | sleep 30 63 | 64 | curl --silent --show-error -H "{{requestedBy}}" -X POST -d '@/tmp/install/blueprint.json' -u admin:admin {{createBlueprintUrl}} 65 | {{#arguments.customRepoEnabled}} 66 | curl --silent --show-error -H "{{requestedBy}}" -X PUT -d '@/tmp/install/create-hdp-repo.json' -u admin:admin {{createHdpRepositoryUrl}} 67 | curl --silent --show-error -H "{{requestedBy}}" -X PUT -d '@/tmp/install/create-hdp-utils-repo.json' -u admin:admin {{createHdpUtilsRepositoryUrl}} 68 | {{/arguments.customRepoEnabled}} 69 | curl --silent --show-error -H "{{requestedBy}}" -X POST -d '@/tmp/install/create-cluster.json' -u admin:admin {{createClusterUrl}} 70 | 71 | {{#logger}}sleep before checking progress{{/logger}} 72 | sleep 30 73 | 74 | {{#logger}}checking progress{{/logger}} 75 | PROGRESS=0 76 | until [ $PROGRESS -eq 100 ]; do 77 | PROGRESS=`curl --silent --show-error -H "{{requestedBy}}" -X GET -u admin:admin {{createClusterStatusUrl}} 2>&1 | grep -oP '\"progress_percent\"\s+\:\s+\K[0-9]+'` 78 | TIMESTAMP=$(date "+%m/%d/%y %H:%M:%S") 79 | echo -ne "$TIMESTAMP - $PROGRESS percent complete!"\\r 80 | sleep 60 81 | done 82 | 83 | {{#logger}}adding users{{/logger}} 84 | useradd -G hdfs admin 85 | usermod -a -G users admin 86 | usermod -a -G hadoop admin 87 | usermod -a -G hive admin 88 | 89 | usermod -a -G users vagrant 90 | usermod -a -G hdfs vagrant 91 | usermod -a -G hadoop vagrant 92 | usermod -a -G hive vagrant 93 | 94 | {{#logger}}doing some hdfs view stuff{{/logger}} 95 | sudo su - hdfs << EOF 96 | hadoop fs -mkdir /user/admin 97 | hadoop fs -chown admin:hadoop /user/admin 98 | EOF 99 | 100 | {{#logger}}final yum cleanup{{/logger}} 101 | yum clean all -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | >**Most of my focus is now on building Hadoop clusters in Docker. See https://github.com/timveil/docker-hadoop. This repository is no longer maintained.** 2 | 3 | # HDP Vagrant Generator 4 | 5 | ## Overview 6 | 7 | A `Vagrantfile` generator for Hortonworks Data Platform (HDP). Built using Spring Boot, this application generates a `Vagrantfile` and supporting files based on the supplied `application.properties`. This makes it very easy to create purpose-built, custom Virtual Box HDP instances that are properly configured for your use case and hardware. 8 | 9 | 10 | ## How to run 11 | 12 | To run, simply download or build the latest `hdp-vagrant-generator-*.jar`. If you would like to customize, place a copy of `application.properties` in the same directory as the `hdp-vagrant-generator-*.jar`, then from a command line run `java -jar hdp-vagrant-generator-*.jar`. A directory named `out` will be created with your new `Vagrantfile`, plus supporting files inside.
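For example, a typical end-to-end run with the default properties might look like the following (the jar version and generated directory name are illustrative; the output directory is named after your `vm.fqdn`):

```sh
# generate the Vagrantfile and supporting files
java -jar hdp-vagrant-generator-1.0.0.jar

# the generated files land in out/<vm.fqdn>
cd out/default.hdp.local

# provision the VM from the generated Vagrantfile
vagrant up
```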
13 | 14 | To use the newly created `Vagrantfile`, make sure you have Vagrant installed along with the following plugins: 15 | 16 | ```sh 17 | vagrant plugin install vagrant-hostsupdater 18 | vagrant plugin install vagrant-vbguest 19 | ``` 20 | 21 | Assuming Vagrant and the required plugins are installed, you can use your new image by running `vagrant up`. The `Vagrantfile` will provision an HDP cluster based on your specifications. The process usually takes about 20 minutes depending on hardware and network connectivity. 22 | 23 | ## How to customize 24 | 25 | You can change the configuration of the Virtual Box image or the HDP cluster by creating a file called `application.properties` and placing it in the same directory as `hdp-vagrant-generator-*.jar`. The following values are used by default if no custom `application.properties` is provided. 26 | 27 | See [HDP Vagrant Local Repo](https://github.com/timveil/hdp-vagrant-local-repo) for a quick and easy way to spin up a local yum repository using Vagrant. 28 | 29 | ```dosini 30 | # ####################################### Logging Configurations 31 | 32 | logging.level.root=WARN 33 | logging.level.veil=INFO 34 | 35 | logging.file=hdp-vagrant-generator.log 36 | 37 | # ####################################### Virtual Box Configurations 38 | 39 | # fully qualified domain name (FQDN) of virtual box image 40 | vm.fqdn=default.hdp.local 41 | 42 | # dns hostname of virtual box image 43 | vm.hostname=default 44 | 45 | # ip address of virtual box image 46 | vm.ip=192.168.7.101 47 | 48 | # ram allocated to virtual box image in MB 49 | vm.memory=8192 50 | 51 | # number of cores allocated to virtual box image 52 | vm.cores=4 53 | 54 | # number of disks allocated to virtual box image 55 | vm.disks=1 56 | 57 | # do you want yum to run update command during vagrant provisioning 58 | vm.update.yum=true 59 | 60 | 61 | # ####################################### HDP Configurations 62 | 63 | # HDP stack version 64 | hdp.stack.version=2.6 65 | 66 | # HDP Ambari version 67 | hdp.ambari.version=2.6.0.0 68 | 69 | # Kerberos enabled (defaults to false)? 70 | hdp.kerberos.enabled= 71 | 72 | # if Kerberos is enabled, the realm (ex. "example.com") 73 | hdp.kerberos.realm= 74 | 75 | # HDP optional components installed 76 | hdp.components=hive,spark 77 | 78 | 79 | # ####################################### Custom Ambari Repository Configurations 80 | 81 | # is custom repo enabled (defaults to false)? 82 | custom.repo.enabled= 83 | 84 | # the fqdn of the custom repo (used in VM hosts file) 85 | custom.repo.fqdn= 86 | 87 | # the ip of the custom repo (used in VM hosts file) 88 | custom.repo.ip= 89 | 90 | # the url of the ambari.repo file in the custom repository (ex. "http://repo.hdp.local/repos/centos7/ambari/2.4.2.0/ambari.repo") 91 | custom.repo.ambari.url= 92 | 93 | # Custom Base URL for HDP Repo (ex. "http://repo.hdp.local/hdp/centos7/HDP-2.5.3.0") 94 | custom.repo.hdp.url= 95 | 96 | # Custom Base URL for HDP Utils Repo (ex.
"http://repo.hdp.local/hdp/centos7/HDP-UTILS-1.1.0.21") 97 | custom.repo.hdp-utils.url= 98 | ``` 99 | -------------------------------------------------------------------------------- /src/main/resources/templates/common/json/blueprint.mustache: -------------------------------------------------------------------------------- 1 | { 2 | "configurations": [ 3 | { 4 | "hdfs-site": { 5 | "properties": { 6 | "dfs.replication": "1" 7 | } 8 | } 9 | }, 10 | { 11 | "core-site" : { 12 | "properties" : { 13 | "hadoop.proxyuser.root.groups" : "*", 14 | "hadoop.proxyuser.root.hosts" : "*" 15 | {{#arguments.kerberosEnabled}} 16 | ,"hadoop.proxyuser.ambari-server-{{clusterName}}.groups" : "*" 17 | ,"hadoop.proxyuser.ambari-server-{{clusterName}}.hosts" : "*" 18 | {{/arguments.kerberosEnabled}} 19 | } 20 | } 21 | }, 22 | { 23 | "hive-site" : { 24 | "properties" : { 25 | "hive.server2.transport.mode" : "binary" 26 | } 27 | } 28 | }, 29 | { 30 | "hadoop-env": { 31 | "properties": { 32 | "namenode_heapsize": "1536m" 33 | } 34 | } 35 | } 36 | {{#containsHbase}} 37 | ,{ 38 | "hbase-env": { 39 | "properties": { 40 | "phoenix_sql_enabled": "true" 41 | } 42 | } 43 | } 44 | {{/containsHbase}} 45 | {{#arguments.kerberosEnabled}} 46 | ,{ 47 | "kerberos-env": { 48 | "properties" : { 49 | "realm" : "{{realmUpper}}", 50 | "kdc_type" : "mit-kdc", 51 | "kdc_hosts" : "{{arguments.fqdn}}", 52 | "ldap_url" : "", 53 | "container_dn" : "", 54 | "encryption_types" : "aes des3-cbc-sha1 rc4 des-cbc-md5", 55 | "admin_server_host" : "{{arguments.fqdn}}" 56 | } 57 | } 58 | }, 59 | { 60 | "krb5-conf": { 61 | "properties" : { 62 | "domains" : "{{realmLower}}, .{{realmLower}}", 63 | "manage_krb5_conf" : "true" 64 | } 65 | } 66 | } 67 | {{/arguments.kerberosEnabled}} 68 | ], 69 | "host_groups": [ 70 | { 71 | "name": "host_group_1", 72 | "configurations": [], 73 | "cardinality": "1", 74 | "components": [ 75 | {"name": "AMBARI_SERVER"}, 76 | {"name": "APP_TIMELINE_SERVER"}, 77 | {"name": "DATANODE"}, 78 | {{#containsHbase}} 79 | {"name": "HBASE_MASTER"}, 80 | {"name": "HBASE_REGIONSERVER"}, 81 | {"name": "HBASE_CLIENT"}, 82 | {"name": "PHOENIX_QUERY_SERVER"}, 83 | {{/containsHbase}} 84 | {"name": "HCAT"}, 85 | {"name": "HDFS_CLIENT"}, 86 | {"name": "HISTORYSERVER"}, 87 | {{#containsHive}} 88 | {"name": "HIVE_CLIENT"}, 89 | {"name": "HIVE_METASTORE"}, 90 | {"name": "HIVE_SERVER"}, 91 | {{/containsHive}} 92 | {"name": "INFRA_SOLR_CLIENT"}, 93 | {"name": "INFRA_SOLR"}, 94 | {{#arguments.kerberosEnabled}} 95 | {"name": "KERBEROS_CLIENT"}, 96 | {{/arguments.kerberosEnabled}} 97 | {"name": "MAPREDUCE2_CLIENT"}, 98 | {"name": "METRICS_COLLECTOR"}, 99 | {"name": "METRICS_GRAFANA"}, 100 | {"name": "METRICS_MONITOR"}, 101 | {"name": "MYSQL_SERVER"}, 102 | {"name": "NAMENODE"}, 103 | {"name": "NODEMANAGER"}, 104 | {"name": "PIG"}, 105 | {"name": "RESOURCEMANAGER"}, 106 | {"name": "SECONDARY_NAMENODE"}, 107 | {"name": "SLIDER"}, 108 | {{#containsSpark}} 109 | {"name": "SPARK_CLIENT"}, 110 | {"name": "SPARK_JOBHISTORYSERVER"}, 111 | {"name": "SPARK_THRIFTSERVER"}, 112 | {"name": "LIVY_SERVER"}, 113 | {{/containsSpark}} 114 | {"name": "TEZ_CLIENT"}, 115 | {"name": "WEBHCAT_SERVER"}, 116 | {"name": "YARN_CLIENT"}, 117 | {"name": "ZOOKEEPER_CLIENT"}, 118 | {"name": "ZOOKEEPER_SERVER"} 119 | ] 120 | } 121 | ], 122 | "Blueprints": { 123 | "blueprint_name": "{{blueprintName}}", 124 | "stack_name": "HDP", 125 | "stack_version": "{{arguments.stackVersion}}" 126 | {{#arguments.kerberosEnabled}} 127 | , "security" : { 128 | "type" : "KERBEROS" 129 | } 130 | 
{{/arguments.kerberosEnabled}} 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/service/AbstractFileService.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.service; 2 | 3 | import com.fasterxml.jackson.databind.JsonNode; 4 | import com.fasterxml.jackson.databind.ObjectMapper; 5 | import com.samskivert.mustache.Mustache; 6 | import com.samskivert.mustache.Template; 7 | import org.apache.commons.io.FileUtils; 8 | import org.slf4j.Logger; 9 | import org.slf4j.LoggerFactory; 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.core.io.Resource; 12 | import org.springframework.core.io.support.PathMatchingResourcePatternResolver; 13 | import org.springframework.core.io.support.ResourcePatternResolver; 14 | import veil.hdp.vagrant.generator.Constants; 15 | import veil.hdp.vagrant.generator.model.Arguments; 16 | import veil.hdp.vagrant.generator.model.Component; 17 | 18 | import java.io.*; 19 | import java.nio.charset.StandardCharsets; 20 | import java.text.MessageFormat; 21 | import java.text.SimpleDateFormat; 22 | import java.util.Date; 23 | import java.util.HashMap; 24 | import java.util.Map; 25 | 26 | public abstract class AbstractFileService implements FileService { 27 | 28 | protected final Logger log = LoggerFactory.getLogger(getClass()); 29 | 30 | @Autowired 31 | protected Mustache.Compiler compiler; 32 | 33 | protected ResourcePatternResolver resolver; 34 | 35 | protected AbstractFileService() { 36 | resolver = new PathMatchingResourcePatternResolver(this.getClass().getClassLoader()); 37 | } 38 | 39 | @Override 40 | public void buildFile(Arguments arguments) { 41 | Map<String, Object> model = buildModel(arguments); 42 | 43 | buildFile(model, arguments); 44 | } 45 | 46 | protected abstract void buildFile(Map<String, Object> model, Arguments arguments); 47 | 48 | protected Map<String, Object> buildModel(Arguments arguments) { 49 | 50 | final String blueprintName="generated"; 51 | final String clusterName=arguments.getHostname(); 52 | 53 | Map<String, Object> model = new HashMap<>(); 54 | model.put("arguments", arguments); 55 | model.put("generatedDate", SimpleDateFormat.getDateTimeInstance(SimpleDateFormat.SHORT, SimpleDateFormat.SHORT).format(new Date())); 56 | model.put("requestedBy", "X-Requested-By: ambari"); 57 | model.put("clusterName", clusterName); 58 | model.put("blueprintName", blueprintName); 59 | 60 | boolean hasHive = arguments.getComponents().contains(Component.hive); 61 | boolean hasSpark = arguments.getComponents().contains(Component.spark); 62 | boolean hasHbase = arguments.getComponents().contains(Component.hbase); 63 | 64 | if (hasSpark || hasHbase) { 65 | hasHive = true; 66 | } 67 | 68 | model.put("containsHive", hasHive); 69 | model.put("containsSpark", hasSpark); 70 | model.put("containsHbase", hasHbase); 71 | 72 | model.put("createBlueprintUrl", MessageFormat.format("http://{0}:8080/api/v1/blueprints/{1}", arguments.getFqdn(), blueprintName)); 73 | model.put("createClusterUrl", MessageFormat.format("http://{0}:8080/api/v1/clusters/{1}", arguments.getFqdn(), clusterName)); 74 | model.put("createClusterStatusUrl", MessageFormat.format("http://{0}:8080/api/v1/clusters/{1}/requests/1", arguments.getFqdn(), clusterName)); 75 | 76 | if (arguments.isCustomRepoEnabled()) { 77 | model.put("ambariRepoFileUrl", arguments.getCustomRepoAmbariUrl()); 78 | //todo: don't like that the repository names are hardcoded.
not sure how to get around this because i don't know where they come from 79 | model.put("createHdpRepositoryUrl", MessageFormat.format("http://{0}:8080/api/v1/stacks/HDP/versions/{1}/operating_systems/redhat7/repositories/HDP-2.5", arguments.getFqdn(), arguments.getStackVersion())); 80 | model.put("createHdpUtilsRepositoryUrl", MessageFormat.format("http://{0}:8080/api/v1/stacks/HDP/versions/{1}/operating_systems/redhat7/repositories/HDP-UTILS-1.1.0.21", arguments.getFqdn(), arguments.getStackVersion())); 81 | } else { 82 | model.put("ambariRepoFileUrl", MessageFormat.format("http://public-repo-1.hortonworks.com/ambari/centos7/2.x/updates/{0}/ambari.repo", arguments.getAmbariVersion())); 83 | } 84 | 85 | if (arguments.isKerberosEnabled()) { 86 | model.put("realmUpper", arguments.getKerberosRealm().toUpperCase()); 87 | model.put("realmLower", arguments.getKerberosRealm().toLowerCase()); 88 | model.put("kdcAdmin", Constants.KDC_ADMIN); 89 | model.put("kdcPassword", Constants.KDC_PASSWORD); 90 | } 91 | 92 | 93 | 94 | Mustache.Lambda logger = (frag, out) -> { 95 | out.write("echo \" \"\n"); 96 | out.write("echo \"---------------------------------------------------------------------------------------------------------------\"\n"); 97 | out.write("echo \"----- "); 98 | frag.execute(out); 99 | out.write("\"\n"); 100 | out.write("echo \"---------------------------------------------------------------------------------------------------------------\"\n"); 101 | out.write("echo \" \"\n"); 102 | }; 103 | 104 | model.put("logger", logger); 105 | 106 | return model; 107 | } 108 | 109 | protected void writeContentsToFile(String parentDirectoryName, String fileName, String contents) { 110 | try { 111 | FileUtils.writeStringToFile(new File(parentDirectoryName, fileName), contents, StandardCharsets.UTF_8); 112 | } catch (IOException e) { 113 | log.error(e.getMessage(), e); 114 | } 115 | } 116 | 117 | protected String compactJSON(String json) { 118 | ObjectMapper mapper = new ObjectMapper(); 119 | 120 | try { 121 | JsonNode jsonNode = mapper.readTree(json); 122 | return jsonNode.toString(); 123 | } catch (IOException e) { 124 | log.error(e.getMessage(), e); 125 | } 126 | 127 | return json; 128 | } 129 | 130 | protected String convertTemplateToString(ResourcePatternResolver resolver, String templateLocation, Map<String, ?> model) { 131 | Resource resource = resolver.getResource(templateLocation); 132 | StringWriter out = new StringWriter(); 133 | 134 | try (InputStreamReader reader = new InputStreamReader(resource.getInputStream())) { 135 | Template template = compiler.escapeHTML(false).compile(reader); 136 | template.execute(model, out); 137 | } catch (IOException e) { 138 | log.error(e.getMessage(), e); 139 | } 140 | 141 | return out.toString(); 142 | 143 | } 144 | 145 | } 146 | -------------------------------------------------------------------------------- /src/main/java/veil/hdp/vagrant/generator/model/Arguments.java: -------------------------------------------------------------------------------- 1 | package veil.hdp.vagrant.generator.model; 2 | 3 | import com.google.common.base.Splitter; 4 | import com.google.common.collect.Sets; 5 | import org.apache.commons.lang3.StringUtils; 6 | import org.slf4j.Logger; 7 | import org.slf4j.LoggerFactory; 8 | import org.springframework.core.env.Environment; 9 | import veil.hdp.vagrant.generator.Constants; 10 | 11 | import java.util.Formatter; 12 | import java.util.List; 13 | import java.util.Set; 14 | 15 | public class Arguments { 16 | 17 | private final Logger log =
LoggerFactory.getLogger(getClass()); 18 | 19 | private String fqdn; 20 | 21 | private String hostname; 22 | 23 | private String ip; 24 | 25 | private Integer memory; 26 | 27 | private Integer cores; 28 | 29 | private Integer disks; 30 | 31 | private boolean updateYum; 32 | 33 | private String stackVersion; 34 | 35 | private String ambariVersion; 36 | 37 | private boolean kerberosEnabled; 38 | 39 | private String kerberosRealm; 40 | 41 | private Set<Component> components; 42 | 43 | private boolean customRepoEnabled; 44 | 45 | private String customRepoFqdn; 46 | 47 | private String customRepoIp; 48 | 49 | private String customRepoAmbariUrl; 50 | 51 | private String customRepoHdpUrl; 52 | 53 | private String customRepoHdpUtilsUrl; 54 | 55 | 56 | 57 | public Arguments(Environment environment) { 58 | this.fqdn = environment.getProperty(Constants.VM_FQDN, String.class); 59 | this.hostname = environment.getProperty(Constants.VM_HOSTNAME, String.class); 60 | 61 | if (StringUtils.isBlank(hostname)) { 62 | this.hostname = fqdn; 63 | } 64 | 65 | this.ip = environment.getProperty(Constants.VM_IP, String.class); 66 | this.memory = environment.getProperty(Constants.VM_MEMORY, Integer.class); 67 | this.cores = environment.getProperty(Constants.VM_CORES, Integer.class); 68 | this.updateYum = Boolean.parseBoolean(environment.getProperty(Constants.VM_UPDATE_YUM, String.class)); 69 | this.disks = environment.getProperty(Constants.VM_DISKS, Integer.class); 70 | this.stackVersion = environment.getProperty(Constants.HDP_STACK_VERSION, String.class); 71 | this.ambariVersion = environment.getProperty(Constants.HDP_AMBARI_VERSION, String.class); 72 | this.kerberosEnabled = environment.getProperty(Constants.HDP_KERBEROS_ENABLED, Boolean.class, false); 73 | 74 | if (kerberosEnabled) { 75 | this.kerberosRealm = environment.getProperty(Constants.HDP_KERBEROS_REALM, String.class); 76 | } 77 | 78 | this.customRepoEnabled = environment.getProperty("custom.repo.enabled", Boolean.class, false); 79 | 80 | if (customRepoEnabled) { 81 | this.customRepoFqdn = environment.getProperty("custom.repo.fqdn", String.class); 82 | this.customRepoIp = environment.getProperty("custom.repo.ip", String.class); 83 | this.customRepoAmbariUrl = environment.getProperty("custom.repo.ambari.url", String.class); 84 | this.customRepoHdpUrl = environment.getProperty("custom.repo.hdp.url", String.class); 85 | this.customRepoHdpUtilsUrl = environment.getProperty("custom.repo.hdp-utils.url", String.class); 86 | } 87 | 88 | final String componentString = environment.getProperty(Constants.HDP_COMPONENTS); 89 | final List<String> componentStrings = Splitter.on(',').omitEmptyStrings().trimResults().splitToList(componentString); 90 | final Set<Component> components = Sets.newHashSet(); 91 | 92 | if (!componentStrings.isEmpty()) { 93 | for (String component : componentStrings) { 94 | components.add(Component.valueOf(component)); 95 | } 96 | } 97 | 98 | this.components = components; 99 | 100 | this.prettyPrint(); 101 | } 102 | 103 | public String getFqdn() { 104 | return fqdn; 105 | } 106 | 107 | public String getHostname() { 108 | return hostname; 109 | } 110 | 111 | public void setHostname(String hostname) { 112 | this.hostname = hostname; 113 | } 114 | 115 | public String getIp() { 116 | return ip; 117 | } 118 | 119 | public Integer getMemory() { 120 | return memory; 121 | } 122 | 123 | public Integer getCores() { 124 | return cores; 125 | } 126 | 127 | public Integer getDisks() { 128 | return disks; 129 | } 130 | 131 | public boolean isUpdateYum() { 132 | return updateYum; 133 | }
134 | 135 | public String getStackVersion() { 136 | return stackVersion; 137 | } 138 | 139 | public String getAmbariVersion() { 140 | return ambariVersion; 141 | } 142 | 143 | public boolean isKerberosEnabled() { 144 | return kerberosEnabled; 145 | } 146 | 147 | public String getKerberosRealm() { 148 | return kerberosRealm; 149 | } 150 | 151 | public Set<Component> getComponents() { 152 | return components; 153 | } 154 | 155 | public boolean isCustomRepoEnabled() { 156 | return customRepoEnabled; 157 | } 158 | 159 | public String getCustomRepoFqdn() { 160 | return customRepoFqdn; 161 | } 162 | 163 | public String getCustomRepoIp() { 164 | return customRepoIp; 165 | } 166 | 167 | public String getCustomRepoAmbariUrl() { 168 | return customRepoAmbariUrl; 169 | } 170 | 171 | public String getCustomRepoHdpUrl() { 172 | return customRepoHdpUrl; 173 | } 174 | 175 | public String getCustomRepoHdpUtilsUrl() { 176 | return customRepoHdpUtilsUrl; 177 | } 178 | 179 | private void prettyPrint() { 180 | StringBuilder builder = new StringBuilder(); 181 | Formatter formatter = new Formatter(builder); 182 | 183 | formatter.format(Constants.FORMAT_NEW_LINE, " "); 184 | formatter.format(Constants.FORMAT_NEW_LINE, "***********************************************************************"); 185 | formatter.format(Constants.FORMAT_NEW_LINE, "*** Arguments"); 186 | formatter.format(Constants.FORMAT_NEW_LINE, "***********************************************************************"); 187 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_FQDN, fqdn); 188 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_HOSTNAME, hostname); 189 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_IP, ip); 190 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_MEMORY, memory); 191 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_CORES, cores); 192 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_DISKS, disks); 193 | formatter.format(Constants.FORMAT_SPACER, Constants.VM_UPDATE_YUM, updateYum); 194 | formatter.format(Constants.FORMAT_SPACER, Constants.HDP_STACK_VERSION, stackVersion); 195 | formatter.format(Constants.FORMAT_SPACER, Constants.HDP_AMBARI_VERSION, ambariVersion); 196 | formatter.format(Constants.FORMAT_SPACER, Constants.HDP_KERBEROS_ENABLED, kerberosEnabled); 197 | formatter.format(Constants.FORMAT_SPACER, Constants.HDP_KERBEROS_REALM, kerberosRealm); 198 | formatter.format(Constants.FORMAT_SPACER, Constants.HDP_COMPONENTS, components); 199 | formatter.format(Constants.FORMAT_NEW_LINE, "***********************************************************************"); 200 | formatter.format(Constants.FORMAT_NEW_LINE, " "); 201 | 202 | log.info(builder.toString()); 203 | } 204 | 205 | } 206 | --------------------------------------------------------------------------------