├── .ruby-version ├── spec ├── data │ ├── Buildfile │ ├── Vagrantfile │ ├── packer.json │ └── Berksfile ├── resource │ ├── Buildfile-simple │ ├── Buildfile-with-poilcy │ ├── Buildfile-in-cookbook │ ├── Buildfile-with-overrides │ ├── Buildfile-with-overrides2 │ ├── Buildfile │ ├── Buildfile-with-block-device-mappings │ ├── Buildfile-home-directory │ ├── Buildfile-policy-organization │ ├── Buildfile-policy-buildtype1 │ ├── Buildfile-policy-buildtype2 │ └── Buildfile-with-post-processors ├── spec_helper.rb ├── config_spec.rb ├── interface_spec.rb └── version_spec.rb ├── Thorfile ├── .rspec ├── Buildfile ├── Rakefile ├── Gemfile ├── bin ├── build ├── build-data └── build-clean ├── lib ├── builderator.rb └── builderator │ ├── control │ ├── cookbook.rb │ ├── data.rb │ ├── data │ │ ├── ecr.rb │ │ └── image.rb │ ├── version │ │ ├── git.rb │ │ ├── auto.rb │ │ ├── comparable.rb │ │ ├── bump.rb │ │ └── scm.rb │ ├── version.rb │ └── cleaner.rb │ ├── util │ ├── task_exception.rb │ ├── aws_exception.rb │ └── limit_exception.rb │ ├── metadata.rb │ ├── interface │ ├── vagrant.rb │ ├── berkshelf.rb │ └── packer.rb │ ├── model │ ├── cleaner.rb │ └── cleaner │ │ ├── volumes.rb │ │ ├── scaling_groups.rb │ │ ├── launch_configs.rb │ │ ├── snapshots.rb │ │ ├── instances.rb │ │ └── images.rb │ ├── interface.rb │ ├── config │ ├── list.rb │ ├── rash.rb │ ├── defaults.rb │ ├── attributes.rb │ └── file.rb │ ├── tasks │ ├── version.rb │ ├── berkshelf.rb │ ├── vendor.rb │ ├── cookbook.rb │ ├── vagrant.rb │ └── packer.rb │ ├── patch │ └── thor-actions.rb │ ├── config.rb │ ├── util.rb │ └── tasks.rb ├── .travis.yml ├── template ├── rubocop.erb ├── Berksfile.erb ├── Gemfile.erb ├── Buildfile.erb ├── README.md.erb ├── gitignore.erb └── Vagrantfile.erb ├── .gitignore ├── cortex.yaml ├── .rubocop.yml ├── .rubocop_todo.yml ├── LICENSE.txt ├── docs ├── clean.md ├── configuration │ ├── cookbook.md │ └── profile.md ├── versioning.md └── configuration.md └── README.md /.ruby-version: -------------------------------------------------------------------------------- 1 | 2.6.3 2 | -------------------------------------------------------------------------------- /spec/data/Buildfile: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /spec/data/Vagrantfile: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /spec/data/packer.json: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /Thorfile: -------------------------------------------------------------------------------- 1 | require 'thor-scmversion' 2 | -------------------------------------------------------------------------------- /.rspec: -------------------------------------------------------------------------------- 1 | --color 2 | --require spec_helper 3 | -------------------------------------------------------------------------------- /Buildfile: -------------------------------------------------------------------------------- 1 | ## 2 | # Use Builderator to release Builderator! 
3 | ## 4 | autoversion.create_tags true 5 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | require 'rspec/core/rake_task' 2 | 3 | RSpec::Core::RakeTask.new(:spec) 4 | 5 | task :default => :spec 6 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | # Specify your gem's dependencies in builderator.gemspec 4 | gemspec 5 | -------------------------------------------------------------------------------- /bin/build: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require_relative '../lib/builderator/tasks' 4 | Builderator::Tasks::CLI.start(ARGV) 5 | -------------------------------------------------------------------------------- /lib/builderator.rb: -------------------------------------------------------------------------------- 1 | require 'builderator/metadata' 2 | 3 | ## 4 | # Start Namespace 5 | ## 6 | module Builderator 7 | end 8 | -------------------------------------------------------------------------------- /spec/data/Berksfile: -------------------------------------------------------------------------------- 1 | require 'builderator/patch/berkshelf' 2 | 3 | source 'https://supermarket.chef.io' 4 | 5 | cookbook 'apt', '>= 0.0.0', {} 6 | cookbook 'etcd-v2', '>= 0.0.0', {} 7 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-simple: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile in a project's working directory 3 | # without any cookbooks 4 | ## 5 | build_name 'builderator-simple-test' 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: ruby 2 | rvm: 3 | - 2.3.3 4 | - 2.3.4 5 | - 2.4.0 6 | - 2.4.1 7 | - ruby-head 8 | matrix: 9 | allow_failures: 10 | - rvm: ruby-head 11 | sudo: false 12 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-with-poilcy: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile in a project's working directory 3 | # without any cookbooks 4 | ## 5 | build_name 'builderator-with-policy-test' 6 | 7 | policy(:test).path './Buildfile-policy-buildtype1' 8 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-in-cookbook: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile in a cookbook project's working directory 3 | ## 4 | build_name 'builderator-testing' 5 | 6 | policy(:test).path './Buildfile-policy1' 7 | 8 | vendor :test_policies do |vendor| 9 | vendor.path '.' 
10 | end 11 | -------------------------------------------------------------------------------- /template/rubocop.erb: -------------------------------------------------------------------------------- 1 | AllCops: 2 | Exclude: 3 | - libraries/**/* 4 | - spec/**/* 5 | - metadata.rb 6 | 7 | Encoding: 8 | Enabled: false 9 | LineLength: 10 | Enabled: false 11 | HashSyntax: 12 | Enabled: false 13 | RescueModifier: 14 | Enabled: false 15 | MethodLength: 16 | Max: 24 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.bundle/ 2 | /.yardoc 3 | /Gemfile.lock 4 | /_yardoc/ 5 | /coverage/ 6 | /doc/ 7 | /pkg/ 8 | /spec/reports/ 9 | /tmp/ 10 | *.bundle 11 | *.so 12 | *.o 13 | *.a 14 | mkmf.log 15 | vendor 16 | 17 | .builderator 18 | .vagrant 19 | Berksfile.lock 20 | /VERSION 21 | *.patch 22 | .ruby-gemset 23 | *.sw[a-z] 24 | -------------------------------------------------------------------------------- /template/Berksfile.erb: -------------------------------------------------------------------------------- 1 | <% cookbook.sources.each do |s| -%> 2 | source '<%= s %>' 3 | <% end -%> 4 | <% if cookbook.metadata %>metadata<% end -%> 5 | 6 | <% cookbook.depends.each do |name, cookbook| -%> 7 | cookbook '<%= name %>', '<%= cookbook.fetch(:version, '>= 0.0.0') %>', <%= cookbook.to_hash %> 8 | <% end -%> 9 | 10 | solver :<%= berkshelf.solver %> 11 | -------------------------------------------------------------------------------- /lib/builderator/control/cookbook.rb: -------------------------------------------------------------------------------- 1 | require 'pathname' 2 | 3 | module Builderator 4 | module Control 5 | ## 6 | # Cookbook logic and helpers 7 | ## 8 | module Cookbook 9 | class << self 10 | def exist? 11 | Pathname.new(Config.cookbook.path).join('metadata.rb').exist? 12 | end 13 | end 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /cortex.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | info: 3 | title: Builderator 4 | description: Tools to make CI Packer builds awesome 5 | x-cortex-git: 6 | github: 7 | alias: r7org 8 | repository: rapid7/builderator 9 | x-cortex-tag: builderator 10 | x-cortex-type: service 11 | x-cortex-dependency: [] 12 | x-cortex-domain-parents: 13 | - tag: pd-automation 14 | openapi: 3.0.1 15 | servers: 16 | - url: "/" 17 | -------------------------------------------------------------------------------- /lib/builderator/control/data.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Control 3 | ## 4 | # Wrapper module for lookup controllers 5 | ## 6 | module Data 7 | def self.lookup(source, query) 8 | fail "#{ source } is not a valid data type!" 
unless respond_to?(source) 9 | 10 | send(source, query) 11 | end 12 | end 13 | end 14 | end 15 | 16 | require_relative './data/image' 17 | require_relative './data/ecr' 18 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | inherit_from: .rubocop_todo.yml 2 | 3 | Metrics/AbcSize: 4 | Max: 48 5 | Metrics/CyclomaticComplexity: 6 | Max: 24 7 | Metrics/MethodLength: 8 | Max: 48 9 | Metrics/PerceivedComplexity: 10 | Max: 12 11 | 12 | Encoding: 13 | Enabled: false 14 | LineLength: 15 | Enabled: false 16 | HashSyntax: 17 | Enabled: false 18 | FileName: 19 | Enabled: false 20 | RescueModifier: 21 | Enabled: false 22 | SpaceInsideStringInterpolation: 23 | Enabled: false 24 | -------------------------------------------------------------------------------- /lib/builderator/util/task_exception.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Util 3 | ## 4 | # Generic wrapper for exceptions in Thor Tasks 5 | ## 6 | class TaskException < StandardError 7 | attr_reader :task 8 | 9 | def initialize(status, task, color = :red) 10 | @status = status 11 | @task = task 12 | @color = color 13 | end 14 | 15 | def status 16 | [@status, message, @color] 17 | end 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /lib/builderator/metadata.rb: -------------------------------------------------------------------------------- 1 | require_relative './util' 2 | 3 | # :nodoc: 4 | module Builderator 5 | VERSION = Util.source_path('VERSION').read rescue '0.0.1' 6 | DESCRIPTION = 'Builderator automates many of the common steps required to build VMs '\ 7 | 'and images with Chef. It provides a common configuration layer for '\ 8 | 'Chef, Berkshelf, Vagrant, and Packer, and tasks to orchestrate the '\ 9 | 'usage of each. 
https://github.com/rapid7/builderator' 10 | end 11 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-with-overrides: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile in a project's working directory 3 | # without any cookbooks 4 | ## 5 | build_name 'builderator-with-overrides' 6 | 7 | policy(:test).path './Buildfile-policy-buildtype1' 8 | 9 | profile :default do |default| 10 | default.chef do |chef| 11 | chef.run_list ['rapid7-test::default'] 12 | end 13 | end 14 | 15 | profile :bake => Config.profile(:default) do |bake| 16 | bake.chef.run_list ['rapid7-test::override'] 17 | end 18 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-with-overrides2: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile in a project's working directory 3 | # without any cookbooks 4 | ## 5 | build_name 'builderator-with-overrides' 6 | 7 | policy(:test).path './Buildfile-policy-buildtype2' 8 | 9 | profile :default do |default| 10 | default.chef do |chef| 11 | chef.run_list ['rapid7-test::default'] 12 | end 13 | end 14 | 15 | profile :bake => Config.profile(:default) do |bake| 16 | bake.chef.run_list ['rapid7-test::override'] 17 | end 18 | -------------------------------------------------------------------------------- /.rubocop_todo.yml: -------------------------------------------------------------------------------- 1 | # This configuration was generated by 2 | # `rubocop --auto-gen-config` 3 | # on 2015-12-08 11:59:48 -0500 using RuboCop version 0.35.1. 4 | # The point is for the user to remove these configuration records 5 | # one by one as the offenses are removed from the code base. 6 | # Note that changes in the inspected code, or installation of new 7 | # versions of RuboCop, may require this file to be generated again. 8 | 9 | 10 | # Configuration parameters: Exclude. 
11 | AllCops: 12 | Exclude: 13 | - 'lib/builderator/model/cookbook.rb' 14 | - 'lib/builderator/tasks/cookbook.rb' 15 | -------------------------------------------------------------------------------- /template/Gemfile.erb: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gem 'builderator', '<%= generator.project.current.builderator.version %>' 4 | 5 | <% if generator.project.current.vagrant.install -%> 6 | gem 'vagrant', :github => 'mitchellh/vagrant', 7 | :tag => '<%= generator.project.current.vagrant.version %>', 8 | :group => :development 9 | 10 | 11 | group :plugins do 12 | <% generator.project.current.vagrant.plugin.each do |pname, plugin| -%> 13 | gem '<%= pname %>'<% if plugin.has?(:version) %>'<%= plugin.version %>'<% end %> 14 | <% end -%> 15 | end 16 | <% end -%> 17 | -------------------------------------------------------------------------------- /spec/resource/Buildfile: -------------------------------------------------------------------------------- 1 | ## 2 | # Test Buildfile 3 | ## 4 | build_name 'builderator' 5 | 6 | autoversion.create_tags false 7 | autoversion.search_tags false 8 | 9 | cookbook do |cookbook| 10 | cookbook.depends 'apt' 11 | cookbook.depends 'etcd-v2' 12 | end 13 | 14 | ## Fetch Chef resources 15 | vendor :chef do |vendor| 16 | vendor.git 'git@github.com:rapid7/chef-demo-data.git' 17 | end 18 | 19 | profile :default do |profile| 20 | profile.chef.run_list 'apt::default', 'etcd-v2::node' 21 | 22 | profile.vagrant do |vagrant| 23 | vagrant.local do |local| 24 | local.memory 512 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-with-block-device-mappings: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile with block device mappings 3 | ## 4 | build_name 'builderator-with-block-device-mappings' 5 | 6 | profile :ami_mappings do |ami| 7 | ami.packer do |packer| 8 | packer.build :aws do |build| 9 | build.ami_block_device_mappings [{ 10 | 'device_name' => '/dev/sda', 11 | 'no_device' => true, 12 | }] 13 | end 14 | end 15 | end 16 | 17 | profile :launch_mappings do |ami| 18 | ami.packer do |packer| 19 | packer.build :aws do |build| 20 | build.launch_block_device_mappings [{ 21 | 'device_name' => '/dev/sda', 22 | 'no_device' => true, 23 | }] 24 | end 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /lib/builderator/interface/vagrant.rb: -------------------------------------------------------------------------------- 1 | require_relative '../interface' 2 | 3 | module Builderator 4 | # :nodoc: 5 | class Interface 6 | class << self 7 | def vagrant 8 | @vagrant ||= Vagrant.new 9 | end 10 | end 11 | 12 | ## 13 | # Render a temporary Vagrantfile 14 | ## 15 | class Vagrant < Interface 16 | command 'vagrant' 17 | template 'template/Vagrantfile.erb' 18 | 19 | def command 20 | c = '' 21 | c << 'ulimit -n 1024; ' if bundled? 22 | c << 'VAGRANT_I_KNOW_WHAT_IM_DOING_PLEASE_BE_QUIET=true ' if bundled? 
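        # Hedged illustration of the assembled command string: when vagrant is
        # loaded from this bundle the prefix works out to roughly
        #   ulimit -n 1024; VAGRANT_I_KNOW_WHAT_IM_DOING_PLEASE_BE_QUIET=true vagrant
        # otherwise `which` falls back to the vagrant executable on the system PATH.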
23 | c << which 24 | end 25 | 26 | def source 27 | directory.join('Vagrantfile') 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-home-directory: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile in a user's home directory 3 | ## 4 | policy(:test).path './Buildfile-policy-organization' 5 | 6 | profile :default do |default| 7 | default.vagrant.ec2.ssh_host_attribute :public_ip_address 8 | default.vagrant.ec2.source_ami = 'my-source-ami' 9 | default.vagrant.ec2.subnet_id 'my-subnet-1' 10 | default.vagrant.ec2.security_groups 'my-sg-id', :mode => :override 11 | default.vagrant.ec2.iam_instance_profile_arn 'arn:aws:iam::xxxxxxxxxx:instance-profile/iam_profile_name' 12 | 13 | default.vagrant.local do |vagrant_virtualbox| 14 | vagrant_virtualbox.memory 2048 15 | end 16 | end 17 | 18 | vendor :test_policies do |vendor| 19 | vendor.path '.' 20 | end 21 | -------------------------------------------------------------------------------- /lib/builderator/util/aws_exception.rb: -------------------------------------------------------------------------------- 1 | require 'json' 2 | require_relative './task_exception' 3 | 4 | module Builderator 5 | module Util 6 | ## 7 | # Exception raised if a safety limit is exceeded 8 | ## 9 | class AwsException < TaskException 10 | attr_reader :exception 11 | 12 | def initialize(task, exception) 13 | super(:fail, task, :red) 14 | @exception = exception 15 | end 16 | 17 | def operation 18 | @exception.context.operation_name 19 | end 20 | 21 | def parameters 22 | @exception.context.params 23 | end 24 | 25 | def message 26 | "An error occured performing task #{ task }. 
#{ operation }"\ 27 | "(#{ JSON.generate(parameters) }): #{ exception.message }" 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /lib/builderator/interface/berkshelf.rb: -------------------------------------------------------------------------------- 1 | require_relative '../interface' 2 | require_relative '../util' 3 | 4 | module Builderator 5 | # :nodoc: 6 | class Interface 7 | class << self 8 | def berkshelf 9 | @berkshelf ||= Berkshelf.new 10 | end 11 | end 12 | 13 | ## 14 | # Render an updated Berksfile 15 | ## 16 | class Berkshelf < Interface 17 | from_gem 'berkshelf' 18 | command 'berks' 19 | template 'template/Berksfile.erb' 20 | 21 | def vendor 22 | Config.local.cookbook_path 23 | end 24 | 25 | def lockfile 26 | directory.join('Berksfile.lock') 27 | end 28 | 29 | def berkshelf_config 30 | Config.cookbook.berkshelf_config 31 | end 32 | 33 | def source 34 | directory.join('Berksfile') 35 | end 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /template/Buildfile.erb: -------------------------------------------------------------------------------- 1 | ## 2 | # Generated by `build generate` 3 | ## 4 | build_name '<%= build_name %>' 5 | 6 | ## Maven tags releases for now 7 | autoversion.create_tags false 8 | 9 | cookbook do |cookbook| 10 | # cookbook.depends 'apt' 11 | end 12 | 13 | profile :default do |profile| 14 | # profile.tags(:service => Config.build_name, 15 | # :version => Config.version, 16 | # :created => Config.date.iso8601) 17 | 18 | profile.chef do |chef| 19 | # chef.run_list 'apt::default' 20 | # chef.node_attrs() 21 | end 22 | 23 | ## Lookup the latest Ubuntu 14.04 Server (linux-3.13) AMI-ID 24 | # profile.vagrant.ec2.source_ami = 25 | # lookup(:image, 'filter' => 'ubuntu-14.04-daily').first.image_id 26 | # profile.packer.build(:default).source_ami = 27 | # lookup(:image, 'filter' => 'ubuntu-14.04-daily').first.image_id 28 | end 29 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-policy-organization: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a policy Buildfile for an organization's CI environment 3 | ## 4 | cookbook do |cb| 5 | cb.add_source 'https://supermarket.chef.io' 6 | end 7 | 8 | local do |local| 9 | local.data_bag_path relative('data_bags') 10 | local.environment_path relative('environments') 11 | end 12 | 13 | profile(:default) do |default| 14 | 15 | default.vagrant.ec2 do |ec2| 16 | ## VPN-attached VPC resources 17 | ec2.subnet_id 'my-subnet-id' 18 | ec2.security_groups 'my-sg-id' 19 | 20 | ec2.associate_public_ip false 21 | ec2.ssh_host_attribute :private_ip_address 22 | end 23 | 24 | default.packer.build(:default) do |build| 25 | build.ami_name [Config.build_name, Config.version, Config.build_number].reject(&:nil?).join('-') 26 | end 27 | end 28 | 29 | generator.project :default do |default| 30 | default.ruby.version '2.1.5' 31 | end 32 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-policy-buildtype1: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a policy Buildfile for a common type of build in an 3 | # organization's CI environment 4 | ## 5 | cookbook.depends 'shared-recipes' do |cookbook| 6 | cookbook.version '~> 2.0' 7 | end 8 | 9 | profile :default do 
|default| 10 | default.chef do |chef| 11 | chef.run_list 'shared-recipes::default' 12 | chef.environment = 'non-default' 13 | end 14 | 15 | ## Upload the generated code artifact to the image/VM 16 | default.artifact :code_bundle do |bundle| 17 | bundle.path 'target/bundle.tar.gz' 18 | bundle.destination ::File.join(Config.chef.staging_directory, 19 | 'cache/code-bundle.tar.gz') 20 | end 21 | end 22 | 23 | profile :bake => Config.profile(:default) do |bake| 24 | bake.chef.run_list 'shared-recipes::ami-bake', :mode => :override 25 | 26 | bake.packer do |packer| 27 | packer.build :default do |build| 28 | 29 | # build.ami_regions 'eu-central-1' - Packer doesn't support eu-central-1 30 | build.ami_users '0123456789' 31 | end 32 | end 33 | 34 | end 35 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 John Manero 2 | 3 | MIT License 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /lib/builderator/util/limit_exception.rb: -------------------------------------------------------------------------------- 1 | require_relative './task_exception' 2 | 3 | module Builderator 4 | module Util 5 | ## 6 | # Exception raised if a safety limit is exceeded 7 | ## 8 | class LimitException < TaskException 9 | DEFAULT_LIMIT = 4 10 | 11 | attr_reader :resource_name 12 | attr_reader :resources 13 | 14 | def initialize(resource_name, task, resources) 15 | super(:limit, task, :yellow) 16 | 17 | @resource_name = resource_name 18 | @resources = resources 19 | end 20 | 21 | def count 22 | @resources.size 23 | end 24 | 25 | def limit 26 | Config.cleaner.limits[resource_name] 27 | end 28 | 29 | def message 30 | msg = "Safety limit exceeded for task `#{task}`: Count #{count} is " 31 | msg << "greater than the limit of #{limit} set in `cleaner.limits.#{resource_name}`. " 32 | msg << 'Please re-run this task with the --force flag if you are sure this is '\ 33 | 'the correct set of resources to delete.' 
unless Config.cleaner.force 34 | 35 | msg 36 | end 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Model 3 | module Cleaner 4 | ## 5 | # Shared model interface 6 | ## 7 | class Base 8 | attr_reader :resources 9 | 10 | def initialize(*args) 11 | fetch(*args) 12 | end 13 | 14 | def fetch 15 | @resources = {} 16 | end 17 | 18 | def find(filters = {}) 19 | Util.filter(resources, filters) 20 | end 21 | 22 | def select(set = []) 23 | resources.select { |k, _| set.include?(k) } 24 | end 25 | 26 | def in_use 27 | find(Config.cleaner.filters) 28 | end 29 | 30 | def in_use?(key) 31 | @in_use ||= in_use 32 | 33 | @in_use.include?(key) 34 | end 35 | 36 | def unused 37 | resources.reject { |k, _| in_use?(k) } 38 | end 39 | end 40 | end 41 | end 42 | end 43 | 44 | require_relative './cleaner/images' 45 | require_relative './cleaner/instances' 46 | require_relative './cleaner/launch_configs' 47 | require_relative './cleaner/scaling_groups' 48 | require_relative './cleaner/snapshots' 49 | require_relative './cleaner/volumes' 50 | -------------------------------------------------------------------------------- /bin/build-data: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'thor' 4 | 5 | require_relative '../lib/builderator/config' 6 | require_relative '../lib/builderator/control/data' 7 | 8 | module Builderator 9 | module Tasks 10 | ## 11 | # Tasks to search AWS APIs 12 | ## 13 | class Data < Thor 14 | def initialize(*_) 15 | super 16 | 17 | Config.load(File.join(ENV['HOME'], '.builderator/Buildfile')) 18 | Config.load(Util.relative_path('Buildfile').to_s) 19 | 20 | Config.recompile 21 | end 22 | 23 | desc 'image ', 'Search for AMIs' 24 | method_option 'filter', :type => :string, :aliases => :f 25 | method_option 'latest', :type => :boolean, :aliases => :l, :default => false 26 | def image(*query) 27 | query = Hash[*query] 28 | 29 | ## Load a pre-defined filter 30 | query['filter'] = options['filter'] 31 | 32 | result = Control::Data.image(query) 33 | 34 | if options['latest'] 35 | puts result.first.image_id 36 | return 37 | end 38 | 39 | puts result.map(&:image_id).join(', ') 40 | end 41 | end 42 | end 43 | end 44 | 45 | Builderator::Tasks::Data.start(ARGV) 46 | -------------------------------------------------------------------------------- /docs/clean.md: -------------------------------------------------------------------------------- 1 | build-clean 2 | =========== 3 | Tasks to delete/deregister abandoned EC2 resources 4 | 5 | ### Options 6 | * `--commit` Execute cleanup task. Default behavior is to display resources that would be removed 7 | * `--filter KEY VALUE [KEY VALUE []]` Key/value pairs to filter resources. Valid keys include tags and native resource properties (See `describe` responses in the Ruby AWS-SDK) 8 | 9 | ### Commands 10 | * `configs` Delete launch configurations that are not associated with an autoscaling group. 11 | 12 | * `images` Delete images that are not associated with a launch configuration, a running instance, or are tagged as the 'parent' of an image that qualifies for any of the previous three conditions. Additionally, a fixed number of images can be retained per ordered groups. 
13 | 14 | **Options** 15 | * `--group-by KEY [KEY []]` Tags/properties to group images by for pruning 16 | * `--sort-by KEY` Tag/property to sort grouped images on (Default: creation_date) 17 | * `--keep N` Number of images in each group to keep (Default: 0) 18 | 19 | * `snapshots` Delete snapshots that are not associated with existing volumes or images. 20 | 21 | * `volumes` Delete volumes that are not attached to instances. 22 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-policy-buildtype2: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a policy Buildfile for a common type of build in an 3 | # organization's CI environment 4 | ## 5 | cookbook.depends 'shared-recipes' do |cookbook| 6 | cookbook.version '~> 2.0' 7 | end 8 | 9 | profile :default do |default| 10 | default.tags(:service => Config.build_name, 11 | :version => "#{Config.version}-#{Config.build_number}", 12 | :created => Config.date.iso8601) 13 | 14 | default.chef do |chef| 15 | chef.run_list 'shared-recipes::default' 16 | chef.environment = 'non-default' 17 | end 18 | 19 | ## Upload the generated code artifact to the image/VM 20 | default.artifact :code_bundle do |bundle| 21 | bundle.path 'target/bundle.tar.gz' 22 | bundle.destination ::File.join(Config.chef.staging_directory, 23 | 'cache/code-bundle.tar.gz') 24 | end 25 | end 26 | 27 | profile :bake => Config.profile(:default) do |bake| 28 | bake.tags(:parent => 'some-value') ## TODO This triggers an issue still. 29 | 30 | bake.chef.run_list 'shared-recipes::ami-bake', :mode => :override 31 | 32 | bake.packer do |packer| 33 | packer.build :default do |build| 34 | 35 | # build.ami_regions 'eu-central-1' - Packer doesn't support eu-central-1 36 | build.ami_users '0123456789' 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/builderator/control/data/ecr.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require 'date' 3 | 4 | require_relative '../../util' 5 | 6 | module Builderator 7 | module Control 8 | # :nodoc: 9 | module Data 10 | # Lookup ECR repository info 11 | # 12 | # NB. We want to embed the login_server info into the returned repo data for 13 | # ease of use. Thus, instead of an AWS struct-type, we get a hash with the 14 | # injected value. 
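      # A hedged sketch of a lookup and the shape it returns; the account ID,
      # region, and repository name below are made-up examples, and only a few
      # keys of the returned hash are shown:
      #
      #   Control::Data.lookup(:repository, 'name' => 'builderator')
      #   #=> [{ :repository_name => 'builderator',
      #   #      :repository_uri  => '012345678901.dkr.ecr.us-east-1.amazonaws.com/builderator',
      #   #      :login_server    => 'https://012345678901.dkr.ecr.us-east-1.amazonaws.com/' }]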
15 | def self.repository(query = {}) 16 | ECR.search(query).map do |repo| 17 | repo.to_h.tap { |r| r[:login_server] = "https://#{repo.repository_uri.sub(repo.repository_name, '')}" } 18 | end 19 | end 20 | end 21 | 22 | ## 23 | # Find ECR repositories for sources 24 | ## 25 | module ECR 26 | class << self 27 | def search(query = {}) 28 | options = {} 29 | 30 | options['repository_names'] = Util.to_array(query.delete('name')) if query.include?('name') 31 | options['registry_id'] = query.delete('owner') if query.include?('owner') 32 | 33 | Util.ecr.describe_repositories(options) 34 | .each_with_object([]) { |page, repositories| repositories.push(*page.repositories) } 35 | .sort { |a, b| a.repository_name <=> b.repository_name } 36 | end 37 | end 38 | end 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner/volumes.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Model 6 | # :nodoc: 7 | module Cleaner 8 | def self.volumes 9 | @volumes ||= Volumes.new 10 | end 11 | 12 | ## 13 | # EC2 Volume Resources 14 | ## 15 | class Volumes < Model::Cleaner::Base 16 | PROPERTIES = %w(size availability_zone state volume_type iops) 17 | 18 | def fetch 19 | @resources = {}.tap do |v| 20 | Util.ec2.describe_volumes.each do |page| 21 | page.volumes.each do |vol| 22 | properties = Util.from_tags(vol.tags) 23 | properties['creation_date'] = vol.create_time.to_datetime 24 | PROPERTIES.each { |pp| properties[pp] = vol[pp.to_sym] } 25 | 26 | v[vol.volume_id] = { 27 | :id => vol.volume_id, 28 | :properties => properties, 29 | :snapshot => vol.snapshot_id 30 | } 31 | end 32 | end 33 | end 34 | end 35 | 36 | def snapshots 37 | resources.values.map { |v| v[:snapshot] } 38 | end 39 | 40 | def in_use 41 | {}.tap do |used| 42 | used.merge!(select(Cleaner.instances.volumes)) 43 | end 44 | end 45 | end 46 | end 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /lib/builderator/control/version/git.rb: -------------------------------------------------------------------------------- 1 | require_relative './scm' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Control 6 | # :nodoc: 7 | class Version 8 | ## 9 | # SCM implementation for Git 10 | ## 11 | module Git 12 | extend SCM 13 | 14 | COMMIT_FORMAT = /^(?[a-f0-9]+)(?:\s+\((?.+?)\))?\s+(?.+)$/ 15 | TAG_FORMAT = %r{tag: ([a-zA-Z0-9\.\-\+/_]+)} 16 | 17 | ## Is there a .git repo in the project root? 18 | def self.supported? 19 | return true if ENV['GIT_DIR'] && File.exist?(ENV['GIT_DIR']) 20 | 21 | Util.relative_path('.git').exist? 22 | end 23 | 24 | def self._history 25 | `git log --pretty='format:%H %d %s' HEAD`.chomp 26 | .split("\n") 27 | .map { |string| string.match(COMMIT_FORMAT) } 28 | .reject(&:nil?) 29 | .map do |commit| 30 | { 31 | :id => commit[:hash], 32 | :message => commit[:message] 33 | }.tap do |c| 34 | tag_match = commit[:tags].scan(TAG_FORMAT) 35 | .flatten 36 | .reject(&:nil?) unless commit[:tags].nil? 37 | 38 | c[:tags] = tag_match unless tag_match.nil? || tag_match.empty? 
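                # Illustrative parse (hash, branch, and tag values are made up):
                # a decorated log line such as
                #   d6f3a2e... (HEAD -> master, tag: 1.2.3) Release 1.2.3
                # yields { :id => 'd6f3a2e...', :message => 'Release 1.2.3', :tags => ['1.2.3'] }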
39 | end 40 | end 41 | end 42 | end 43 | 44 | SCM.register(Git) 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /template/README.md.erb: -------------------------------------------------------------------------------- 1 | <%= build_name %> 2 | ===== 3 | 4 | ## Usage 5 | 6 | ### Install dependencies 7 | 8 | 1. If you have not already done so, configure [`rbenv`](https://github.com/rbenv/rbenv#installation) 9 | and [`ruby-build`](https://github.com/rbenv/ruby-build#installation) on your 10 | workstation and [install Ruby 2.1.5](https://github.com/rbenv/rbenv#installing-ruby-versions). 11 | 1. Ensure that a supported version of [bundler](http://bundler.io/) is installed (`">= 1.5.2", "<= 1.10.6"`): 12 | 13 | ```bash 14 | gem install bundler -v 1.10.6 15 | ``` 16 | 17 | 1. Use `bundler` to install application-specific dependencies" 18 | 19 | ```bash 20 | bundle install 21 | ``` 22 | 23 | 1. **Optionally**, add an alias for `build`. In `$HOME/.profile` or `$HOME/bash_profile` 24 | add the following, then reload your profile: 25 | 26 | ```bash 27 | alias build="bundle exec build" 28 | ``` 29 | 30 | ```bash 31 | . ~/.profile 32 | ``` 33 | 34 | **All subsequent examples with a `build` command assume that you've created an alias 35 | or are prepending `bundle exec` to the command string** 36 | 37 | ### Start a local Vagrant VM, then SSH into it. 38 | 39 | ```bash 40 | build local 41 | build vagrant ssh 42 | ``` 43 | 44 | To clean up VMs, run 45 | 46 | ```bash 47 | build vagrant destroy 48 | ``` 49 | 50 | ### Launch an EC2 instance 51 | 52 | ```bash 53 | build ec2 54 | build vagrant ssh 55 | ``` 56 | 57 | ### Clean up VMs and local generated resources 58 | 59 | ```bash 60 | build clean 61 | ``` 62 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner/scaling_groups.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Model 6 | # :nodoc: 7 | module Cleaner 8 | def self.scaling_groups 9 | @scaling_groups ||= ScalingGroups.new 10 | end 11 | 12 | ## 13 | # AutoScaling Group Resoruces 14 | ## 15 | class ScalingGroups < Model::Cleaner::Base 16 | attr_reader :resources 17 | PROPERTIES = %w(auto_scaling_group_arn min_size max_size desired_capacity 18 | default_cooldown availability_zones load_balancer_names 19 | vpc_zone_identifier status termination_policies) 20 | 21 | def fetch 22 | @resources = {}.tap do |i| 23 | Util.asg.describe_auto_scaling_groups.each do |page| 24 | page.auto_scaling_groups.each do |a| 25 | properties = Util.from_tags(a.tags) 26 | properties['creation_date'] = a.created_time.to_datetime 27 | PROPERTIES.each { |pp| properties[pp] = a[pp.to_sym] } 28 | 29 | i[a.launch_configuration_name] = { 30 | :id => a.auto_scaling_group_name, 31 | :properties => properties, 32 | :config => a.launch_configuration_name 33 | } 34 | end 35 | end 36 | end 37 | end 38 | 39 | def launch_configs 40 | resources.values.map { |g| g[:config] } 41 | end 42 | end 43 | end 44 | end 45 | end 46 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner/launch_configs.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Model 6 | # :nodoc: 7 | module Cleaner 8 | def self.launch_configs 9 | 
@launch_configs ||= LaunchConfigs.new 10 | end 11 | 12 | ## 13 | # ASG LaunchConfiguration Resources 14 | ## 15 | class LaunchConfigs < Model::Cleaner::Base 16 | PROPERTIES = %w(launch_configuration_arn key_name security_groups 17 | user_data instance_type spot_price iam_instance_profile 18 | ebs_optimized associate_public_ip_address placement_tenancy) 19 | 20 | def fetch 21 | @resources = {}.tap do |i| 22 | Util.asg.describe_launch_configurations.each do |page| 23 | page.launch_configurations.each do |l| 24 | properties = { 'creation_date' => l.created_time.to_datetime } 25 | PROPERTIES.each { |pp| properties[pp] = l[pp.to_sym] } 26 | 27 | i[l.launch_configuration_name] = { 28 | :id => l.launch_configuration_name, 29 | :properties => properties, 30 | :image => l.image_id 31 | } 32 | end 33 | end 34 | end 35 | end 36 | 37 | def images 38 | resources.values.map { |l| l[:image] } 39 | end 40 | 41 | def in_use 42 | select(Cleaner.scaling_groups.launch_configs) 43 | end 44 | end 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner/snapshots.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Model 6 | # :nodoc: 7 | module Cleaner 8 | def self.snapshots 9 | @snapshots ||= Snapshots.new 10 | end 11 | 12 | ## 13 | # EC2 Snapshot Resources 14 | ## 15 | class Snapshots < Model::Cleaner::Base 16 | PROPERTIES = %w(state owner_id description volume_size) 17 | 18 | def fetch 19 | @resources = {}.tap do |s| 20 | Util.ec2.describe_snapshots(:filters => [ 21 | { 22 | :name => 'status', 23 | :values => %w(completed) 24 | } 25 | ], :owner_ids => ['self']).each do |page| 26 | page.snapshots.each do |snap| 27 | properties = Util.from_tags(snap.tags) 28 | properties['creation_date'] = snap.start_time.to_datetime 29 | PROPERTIES.each { |pp| properties[pp] = snap[pp.to_sym] } 30 | 31 | s[snap.snapshot_id] = { 32 | :id => snap.snapshot_id, 33 | :properties => properties, 34 | :volume => snap.volume_id 35 | } 36 | end 37 | end 38 | end 39 | end 40 | 41 | def in_use 42 | {}.tap do |used| 43 | used.merge!(select(Cleaner.volumes.snapshots)) 44 | used.merge!(select(Cleaner.images.snapshots)) 45 | end 46 | end 47 | end 48 | end 49 | end 50 | end 51 | -------------------------------------------------------------------------------- /template/gitignore.erb: -------------------------------------------------------------------------------- 1 | # Compiled source # 2 | ################### 3 | *.com 4 | *.class 5 | *.dll 6 | *.exe 7 | *.o 8 | *.so 9 | 10 | # Packages # 11 | ############ 12 | # it's better to unpack these files and commit the raw source 13 | # git has its own built in compression methods 14 | *.7z 15 | *.dmg 16 | *.gz 17 | *.iso 18 | *.jar 19 | *.rar 20 | *.tar 21 | *.zip 22 | 23 | # Logs and databases # 24 | ###################### 25 | *.log 26 | 27 | # OS generated files # 28 | ###################### 29 | .DS_Store* 30 | ehthumbs.db 31 | Icon? 
32 | Thumbs.db 33 | 34 | # Editor Files # 35 | ################ 36 | *~ 37 | *# 38 | .#* 39 | \#*# 40 | .*.sw[a-z] 41 | *.un~ 42 | pkg/ 43 | 44 | # Berkshelf 45 | /cookbooks 46 | Berksfile.lock 47 | 48 | # Bundler 49 | bin/* 50 | .bundle/* 51 | 52 | VERSION 53 | bundle.tar.gz 54 | metadata.json 55 | vendor/ 56 | *.swp 57 | 58 | .vagrant 59 | .builderator/* 60 | 61 | # Gradle Files # 62 | ################ 63 | .gradle 64 | 65 | # Build output directies 66 | /target 67 | */target 68 | /build 69 | */build 70 | 71 | # IntelliJ specific files/directories 72 | out 73 | .idea 74 | *.ipr 75 | *.iws 76 | *.iml 77 | atlassian-ide-plugin.xml 78 | 79 | # Eclipse specific files/directories 80 | .classpath 81 | .checkstyle 82 | .project 83 | .settings 84 | .metadata 85 | .checkstyle 86 | */bin 87 | test-output 88 | 89 | # NetBeans specific files/directories 90 | .nbattrs 91 | 92 | # JavaScript 93 | /node_modules 94 | /bower_components 95 | 96 | # Maven 97 | /.repostory/ 98 | /test-output/ 99 | /logs/ 100 | 101 | # checkstyle 102 | .checkstyle 103 | /logs/ 104 | /bin/ 105 | -------------------------------------------------------------------------------- /lib/builderator/interface.rb: -------------------------------------------------------------------------------- 1 | require 'erb' 2 | require 'fileutils' 3 | require 'securerandom' 4 | 5 | require_relative './config/attributes' 6 | require_relative './config/rash' 7 | require_relative './util' 8 | 9 | module Builderator 10 | ## 11 | # Base class for integration interfaces 12 | ## 13 | class Interface 14 | class << self 15 | def command(arg = nil) 16 | @command = arg unless arg.nil? 17 | @command 18 | end 19 | 20 | def from_gem(arg = nil) 21 | @from_gem = arg unless arg.nil? 22 | @from_gem || @command 23 | end 24 | 25 | def template(arg = nil) 26 | @template = arg unless arg.nil? 27 | @template 28 | end 29 | end 30 | 31 | ## Is vagrant in this bundle? 32 | def bundled? 33 | Gem.loaded_specs.key?(self.class.from_gem) 34 | end 35 | 36 | def which 37 | return self.class.command if bundled? 38 | 39 | ## Not in the bundle. Use system path 40 | `which #{self.class.command}`.chomp.tap { |path| fail "Unable to locate a #{self.class.command} executable" if path.empty? } 41 | end 42 | alias_method :command, :which 43 | 44 | def directory 45 | Util.workspace 46 | end 47 | 48 | def render 49 | ERB.new(Util.source_path(self.class.template).binread, 50 | nil, '-', '@output_buffer').result(Config.instance_eval('binding')) 51 | end 52 | 53 | def source 54 | fail 'Interface does not provide a source!' 55 | end 56 | 57 | def write 58 | directory.mkpath 59 | source.write(render) 60 | self 61 | end 62 | 63 | def clean 64 | source.unlink 65 | end 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /docs/configuration/cookbook.md: -------------------------------------------------------------------------------- 1 | cookbook 2 | ======== 3 | 4 | * `path` The path to a local cookbook source, including a valid `metadata.rb` file. 5 | * `sources, type: list, singular: add_source, unique: true` Supermarket APIs to resolve cookbook dependencies from 6 | * `metadata` Boolean. Read dependencies from local cookbook metadata. 7 | 8 | ## `depends name` 9 | 10 | Collection of declared cookbook dependencies. Options are passed to [Berkshelf](http://berkshelf.com/). Check out their docs for additional details. 
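A minimal sketch of `depends` entries in a Buildfile, assuming one Supermarket-hosted cookbook and one git-hosted cookbook (the second name, URL, and tag are illustrative, not real dependencies); the options listed below are set inside the block:

```ruby
cookbook.depends 'shared-recipes' do |cookbook|
  cookbook.version '~> 2.0'
end

cookbook.depends 'my-internal-cookbook' do |cookbook|
  cookbook.git 'git@github.com:example-org/my-internal-cookbook.git'
  cookbook.tag '1.0.0'
end
```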
11 | 12 | * `version` A version constraint spec for the cookbook 13 | * `git` A git URI from which to fetch the cookbook 14 | * `GitHub` A GitHub URL from which to fetch the cookbook 15 | * `branch` A branch reference from which to fetch the cookbook 16 | * `tag` A tag reference from which to fetch the cookbook 17 | * `ref` A comittish reference from which to fetch the cookbook 18 | * `rel` The sub-directory of a git repository to check out as a cookbook 19 | * `path` The path to a local cookbook, relative to the build workspace. 20 | 21 | ## Tasks 22 | 23 | * `berks metadata COOKBOOK` Generates a `metadata.json` file from a local cookbook's `metadata.rb` file. The specified `COOKBOOK` must be in the `cookbook.depends` collection with a valid `path` attribute. 24 | * `berks vendor` Resolve and fetch cookbooks for the `cookbook.depends` collection and store in `$VENDOR_PATH/cookbooks` 25 | * `berks upload` Upload the resolved dependency set for `cookbook.depends` to the Chef Server configured in Berkshelf's configuration (default `$HOME/.berkshelf/config.json`) 26 | * `berks clean` Removes the project's cookbook vendor cache. 27 | * `berks uncache` is a helper to clear Berkshelf's host-cache, in `$HOME/.berkshelf/cookbooks` by default. 28 | -------------------------------------------------------------------------------- /lib/builderator/config/list.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Config 3 | ## 4 | # Extend Array with context about how its values should be merged with other 5 | # configuration layers. Possible modes are: 6 | # 7 | # * 'override' - Do not merge. Replace the other node's elements 8 | # * 'union' - Perform a set-union on the elements of this and the other node 9 | ## 10 | class List < Array 11 | class << self 12 | def coerce(somehting, options = {}) 13 | return somehting if somehting.is_a?(self) 14 | return new(options).push(*somehting) if somehting.is_a?(Array) 15 | 16 | ## `somehting` is not a valid input. Just give back an instance. 17 | new([], options) 18 | end 19 | end 20 | 21 | attr_reader :mode 22 | 23 | def initialize(from = nil, **options) 24 | @mode = options.fetch(:mode, :union) 25 | 26 | merge!(from) unless from.nil? 27 | end 28 | 29 | def clone 30 | self.class.new(self, :mode => mode) 31 | end 32 | 33 | def set(*elements) 34 | clear 35 | push(*elements) 36 | end 37 | 38 | ## 39 | # Combine elements with `other` according to `other`'s `mode` 40 | ## 41 | def merge!(other) 42 | other = self.class.coerce(other) 43 | 44 | case other.mode 45 | when :override 46 | return false if self == other 47 | set(*other) 48 | 49 | when :union 50 | merged = self | other 51 | return false if merged == self 52 | 53 | set(*merged) 54 | 55 | else 56 | fail "Invalid List mode #{other.mode}!" 
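          # Hedged illustration of the two branches above (element values are
          # made up): merging [2, 3] into a list holding [1, 2] yields [1, 2, 3]
          # under :union, but replaces the contents with [2, 3] under :override.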
57 | end 58 | 59 | true 60 | end 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /lib/builderator/control/version/auto.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Control 3 | class Version 4 | ## 5 | # Search through commits since current version for #TYPE tags 6 | # 7 | # Included in Version 8 | ## 9 | module Auto 10 | DEFAULT_TYPE = 'patch'.freeze 11 | MESSAGE_KEYWORDS = /#(?build|prerelease|release|patch\-prerelease|patch|minor\-prerelease|minor|major\-prerelease|major)(?:=(?[a-zA-Z0-9\-_]+))?/ 12 | 13 | def auto_type 14 | fail 'Version-bump type `auto` is unsuppoeted for this SCM. Version does not'\ 15 | ' have a valid `ref` value' if ref.nil? 16 | 17 | ## Get commits since self.ref (e.g. commits since this tag) 18 | history_since_current = SCM.history.take_while do |commit| 19 | commit.id != ref 20 | end 21 | 22 | ## Search for the highest-precedence #TAG in those commit messages 23 | ## Search from oldest-to-newest. Newer #TAGs of equal precedence win 24 | result = history_since_current.reverse.reduce(nil) do |highest, commit| 25 | ## Not going to bother parsing multiple matches. If you're 26 | ## putting more than one #TYPE in your commit message, you 27 | ## deserve what you get... 28 | found_type = commit.message.scan(MESSAGE_KEYWORDS).first 29 | 30 | ## No #TYPE in message 31 | next highest if found_type.nil? 32 | 33 | ## First match 34 | next found_type if highest.nil? 35 | 36 | ## Retrun higher precedence release type 37 | RELEASE_TYPES[found_type.first.to_s] <= RELEASE_TYPES[highest.first.to_s] ? found_type : highest 38 | end 39 | 40 | return ['prerelease', nil] if result.nil? && is_prerelease 41 | return [DEFAULT_TYPE, nil] if result.nil? 
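        # Illustrative commit messages and the [type, prerelease_name] pair they
        # produce (the messages themselves are made up):
        #   "Fix rounding bug #patch"                 => ['patch', nil]
        #   "Add lookup caching #minor"               => ['minor', nil]
        #   "Start 2.0 work #major-prerelease=alpha"  => ['major-prerelease', 'alpha']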
42 | 43 | result 44 | end 45 | end 46 | end 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /spec/resource/Buildfile-with-post-processors: -------------------------------------------------------------------------------- 1 | ## 2 | # This test file simulates a Buildfile with a set of post-processors 3 | ## 4 | build_name 'builderator-with-post-processors' 5 | 6 | profile :single do |single| 7 | single.packer do |packer| 8 | packer.build :docker do |build| 9 | build.commit true 10 | build.image 'openjdk:8-jdk' 11 | end 12 | 13 | packer.post_processors [ 14 | 'docker-push' 15 | ] 16 | end 17 | end 18 | 19 | profile :complex do |complex| 20 | complex.packer do |packer| 21 | packer.build :docker do |build| 22 | build.commit true 23 | build.image 'openjdk:8-jdk' 24 | end 25 | 26 | packer.post_processors [ 27 | { 28 | :type => 'docker-tag', 29 | :repository => 'rapid7/builderator', 30 | :tag => 'latest' 31 | } 32 | ] 33 | end 34 | end 35 | 36 | profile :sequence do |sequence| 37 | sequence.packer do |packer| 38 | packer.build :docker do |build| 39 | build.commit true 40 | build.image 'openjdk:8-jdk' 41 | end 42 | 43 | packer.post_processors [ 44 | [ 45 | { 46 | :type => 'docker-tag', 47 | :repository => 'rapid7/builderator', 48 | :tag => 'latest' 49 | }, 50 | 'docker-push' 51 | ] 52 | ] 53 | end 54 | end 55 | 56 | profile :multiple_sequences do |sequence| 57 | sequence.packer do |packer| 58 | packer.build :docker do |build| 59 | build.commit true 60 | build.image 'openjdk:8-jdk' 61 | end 62 | 63 | packer.post_processors [ 64 | [ 65 | { 66 | :type => 'docker-tag', 67 | :repository => 'rapid7/builderator', 68 | :tag => '1.2.2' 69 | }, 70 | 'docker-push' 71 | ], 72 | [ 73 | { 74 | :type => 'docker-tag', 75 | :repository => 'rapid7/builderator', 76 | :tag => 'latest' 77 | }, 78 | 'docker-push' 79 | ] 80 | ] 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner/instances.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Model 6 | # :nodoc: 7 | module Cleaner 8 | def self.instances 9 | @instances ||= Instances.new 10 | end 11 | 12 | ## 13 | # EC2 Instance resources 14 | ## 15 | class Instances < Model::Cleaner::Base 16 | PROPERTIES = %w(private_dns_name public_dns_name instance_type 17 | subnet_id vpc_id private_ip_address public_ip_address 18 | architecture root_device_type virtualization_type 19 | hypervisor) 20 | 21 | def fetch 22 | @resources = {}.tap do |i| 23 | Util.ec2.describe_instances(:filters => [ 24 | { 25 | :name => 'instance-state-name', 26 | :values => %w(pending running shutting-down stopping stopped) 27 | } 28 | ]).each do |page| 29 | page.reservations.each do |r| 30 | r.instances.each do |instance| 31 | properties = Util.from_tags(instance.tags) 32 | properties['availability_zone'] = instance.placement.availability_zone 33 | properties['creation_date'] = instance.launch_time.to_datetime 34 | PROPERTIES.each { |pp| properties[pp] = instance[pp.to_sym] } 35 | 36 | i[instance.instance_id] = { 37 | :id => instance.instance_id, 38 | :image => instance.image_id, 39 | :volumes => instance.block_device_mappings.map { |b| b.ebs.volume_id }, 40 | :properties => properties 41 | } 42 | end 43 | end 44 | end 45 | end 46 | end 47 | 48 | def images 49 | resources.values.map { |i| i[:image] } 50 | end 51 | 52 | def volumes 53 | 
resources.values.map { |i| i[:volumes] }.flatten 54 | end 55 | end 56 | end 57 | end 58 | end 59 | -------------------------------------------------------------------------------- /lib/builderator/tasks/version.rb: -------------------------------------------------------------------------------- 1 | require 'thor' 2 | 3 | require_relative '../patch/thor-actions' 4 | require_relative '../control/version' 5 | require_relative '../util' 6 | 7 | module Builderator 8 | module Tasks 9 | ## 10 | # Tasks to detect and increment package versions 11 | ## 12 | class Version < Thor 13 | include Thor::Actions 14 | 15 | def self.exit_on_failure? 16 | true 17 | end 18 | 19 | class_option :git_dir, desc: 'The .git directory to use (default: .git)' 20 | 21 | desc 'current', 'Print the current version and write it to file' 22 | def current 23 | # Workaround for singleton git provider not supporting alternate path. 24 | ENV['GIT_DIR'] = options[:git_dir] if options[:git_dir] 25 | 26 | unless Config.autoversion.search_tags 27 | say_status :disabled, 'Automatically detecting version information '\ 28 | 'from SCM tags is disabled', :red 29 | return 30 | end 31 | 32 | say_status :version, "#{Control::Version.current} (#{Control::Version.current.ref})" 33 | Control::Version.write 34 | Control::Version.set_config_version 35 | end 36 | 37 | desc 'bump TYPE [PRERELEASE_NAME]', 'Increment the package version, optionally with a named prerelease' 38 | def bump(type = :auto, prerelease_name = nil) 39 | # Workaround for singleton git provider not supporting alternate path. 40 | ENV['GIT_DIR'] = options[:git_dir] if options[:git_dir] 41 | 42 | ## Guard: Don't try to create a new version if `create_tags` is explicitly disabled 43 | ## or `search_tags` is disabled as we won't have a valid current version to increment 44 | unless Config.autoversion.create_tags && Config.autoversion.search_tags 45 | say_status :disabled, 'Tag creation is disabled for this build. Not '\ 46 | 'creating new SCM tags!', :red 47 | 48 | ## Try to read the current version anyway, incase `search_tags == true` 49 | current 50 | 51 | return 52 | end 53 | 54 | say_status :bump, "by #{type} version" 55 | Control::Version.bump(type, prerelease_name) 56 | 57 | ## Print the new version and write out a VERSION file 58 | current 59 | 60 | ## Try to create and push a tag 61 | run "git tag #{Control::Version.current}" 62 | run 'git push --tags' 63 | end 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /lib/builderator/control/version/comparable.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Control 3 | class Version 4 | ## 5 | # Sort earliest -> latest 6 | # (Array.last -> latest (e.g. 1.0.0), Array.first -> earliest(e.g. 0.0.1)) 7 | ## 8 | module Comparable 9 | include ::Comparable 10 | 11 | def <=>(other) 12 | ## Simple version comparison 13 | return major <=> other.major unless same?(:major, other) 14 | return minor <=> other.minor unless same?(:minor, other) 15 | return patch <=> other.patch unless same?(:patch, other) 16 | 17 | ## Prereleases: prerelease < non-prerelease 18 | return compare(:is_prerelease, other) if one?(:is_prerelease, other) 19 | 20 | if both?(:is_prerelease, other) 21 | ## This is a little sketchy... We're assuming that pre-releases 22 | ## have a lexicological order. 
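          ## Illustrative ordering under that assumption (version strings are
          ## made-up examples): 1.2.3-alpha.0 < 1.2.3-beta.0 < 1.2.3-beta.1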
23 | return prerelease_name <=> other.prerelease_name unless same?(:prerelease_name, other) 24 | return prerelease_iteration <=> other.prerelease_iteration unless same?(:prerelease_iteration, other) 25 | end 26 | 27 | ## Build number. With build number > without build number 28 | compare(:build, other) 29 | end 30 | 31 | private 32 | 33 | ## this == that 34 | def same?(parameter, other) 35 | send(parameter) == other.send(parameter) 36 | end 37 | 38 | ## this && that 39 | def both?(parameter, other) 40 | send(parameter) && other.send(parameter) 41 | end 42 | 43 | ## this ^ that (XOR) 44 | def one?(parameter, other) 45 | (send(parameter)) ^ (other.send(parameter)) 46 | end 47 | 48 | ## this || that 49 | def either?(parameter, other) 50 | send(parameter) || other.send(parameter) 51 | end 52 | 53 | ## !(this || that) 54 | def neither?(parameter, other) 55 | !either?(parameter, other) 56 | end 57 | 58 | ## Compare with support for `nil` values 59 | def compare(parameter, other) 60 | a = send(parameter) 61 | b = other.send(parameter) 62 | 63 | ## NilClass, TrueClass, and FalseClass' <=> operators return nil 64 | return a <=> b unless a.nil? || b.nil? || 65 | a.is_a?(TrueClass) || b.is_a?(TrueClass) || 66 | a.is_a?(FalseClass) || b.is_a?(FalseClass) 67 | 68 | return 1 if a && !b 69 | return -1 if !a && b 70 | 71 | ## a && b || !a && !b 72 | 0 73 | end 74 | end 75 | end 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /lib/builderator/control/version/bump.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Control 3 | class Version 4 | ## 5 | # Increment version's parameters by specified steps 6 | # 7 | # Included in Version 8 | ## 9 | module Bump 10 | def bump(type = 'auto', prerelease_name = nil) # rubocop:disable Metrics/PerceivedComplexity 11 | ## Grok commits since current for a #TYPE string 12 | type, prerelease_name = auto_type if type.to_s == 'auto' 13 | 14 | fail "Unrecognized release type #{type}" unless RELEASE_TYPES.include?(type.to_s) 15 | type_num = RELEASE_TYPES[type.to_s] 16 | 17 | ## 18 | # Reset lower-precendence parameters to nil/0 19 | ## 20 | self.build = nil if type_num < RELEASE_TYPES['build'] 21 | 22 | ## Clear pre-release flags 23 | if type_num < RELEASE_TYPES['prerelease'] 24 | self.is_prerelease = false 25 | self.prerelease_name = nil 26 | self.prerelease_iteration = nil 27 | end 28 | 29 | self.patch = 0 if type_num < RELEASE_TYPES['patch'] 30 | self.minor = 0 if type_num < RELEASE_TYPES['minor'] 31 | self.major = 0 if type_num < RELEASE_TYPES['major'] 32 | 33 | ## Set new version's ref 34 | self.ref = SCM.history.first.id 35 | 36 | ## 37 | # Increment specified parameters 38 | ## 39 | case type.to_s 40 | when 'build' 41 | if build.nil? 
42 | self.build = 0 43 | else 44 | self.build += 1 45 | end 46 | 47 | when 'prerelease' 48 | ## Start a prerelease train from a new patch version 49 | ## if it doesn't already exist 50 | self.patch += 1 unless is_prerelease 51 | prerelease(prerelease_name) 52 | 53 | when 'release' 54 | ## Remove pre-release parameters from the current patch 55 | ## (already done above ^^) 56 | 57 | when 'patch-prerelease' 58 | ## Force a new pre-release train from a new patch version 59 | self.patch += 1 60 | prerelease(prerelease_name) 61 | 62 | when 'patch' then self.patch += 1 63 | 64 | when 'minor-prerelease' 65 | self.minor += 1 66 | prerelease(prerelease_name) 67 | 68 | when 'minor' then self.minor += 1 69 | 70 | when 'major-prerelease' 71 | self.major += 1 72 | prerelease(prerelease_name) 73 | 74 | when 'major' then self.major += 1 75 | end 76 | 77 | self 78 | end 79 | end 80 | end 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /lib/builderator/control/version/scm.rb: -------------------------------------------------------------------------------- 1 | module Builderator 2 | module Control 3 | class Version 4 | ## 5 | # Generic SCM interface 6 | ## 7 | module SCM 8 | ## Fetch and cache history for the current HEAD/TIP 9 | def history 10 | @history ||= _history.map { |commit| Commit.new(commit) } 11 | end 12 | 13 | ## Find all tags in the branch's history 14 | def tags 15 | @tags ||= _tags 16 | .map { |tag, ref| Version.from_string(tag, :ref => ref) } 17 | .compact 18 | .sort 19 | end 20 | 21 | ## 22 | # OVERRIDE: Return true if this provider will work for `path` 23 | ## 24 | def supported? 25 | fail 'Method `supported?` must be implemented in SCM providers!' 26 | end 27 | 28 | ## 29 | # OVERRIDE: Return an array of hashes with keys 30 | # - id -> SCM commit identity 31 | # - message -> SCM commit message 32 | # - tags -> nil or an array of strings 33 | ## 34 | def _history 35 | fail 'Method `_history` must be implemented in SCM providers!' 36 | end 37 | 38 | ## 39 | # OVERRIDE: Return an array of [tag, commit-id] tuples 40 | ## 41 | def _tags 42 | history.reject { |commit| commit.tags.empty? } 43 | .map { |commit| commit.tags.map { |tag| [tag, commit.id] } } 44 | .each_with_object([]) { |commit, tags| tags.push(*commit) } 45 | end 46 | 47 | class << self 48 | def history 49 | provider.history 50 | end 51 | 52 | def tags 53 | provider.tags 54 | end 55 | 56 | def register(klass) 57 | fail 'Provider module must extend '\ 58 | 'Builderator::Control::Version::SCM' unless 59 | klass.singleton_class.include?(SCM) 60 | 61 | ## Make newer providers override those with the same capability 62 | providers.unshift(klass) 63 | end 64 | 65 | def unregister(klass) 66 | providers.delete(klass) 67 | end 68 | 69 | def providers 70 | @providers ||= [] 71 | end 72 | 73 | ## Find a version provider for this build 74 | def provider 75 | providers.find(&:supported?).tap do |found| 76 | fail 'Builderator::Control::Version: '\ 77 | 'Unsupported SCM' if found.nil? 
78 | end 79 | end 80 | end 81 | 82 | ## An SCM commit entity 83 | class Commit 84 | attr_reader :id 85 | attr_reader :message 86 | attr_reader :tags 87 | 88 | def initialize(match) 89 | @id = match[:id] 90 | @message = match[:message] 91 | @tags = match.fetch(:tags, []) 92 | end 93 | end 94 | end 95 | end 96 | end 97 | end 98 | -------------------------------------------------------------------------------- /lib/builderator/tasks/berkshelf.rb: -------------------------------------------------------------------------------- 1 | require 'chef/cookbook/metadata' 2 | require 'thor' 3 | 4 | require_relative '../interface/berkshelf' 5 | require_relative '../patch/thor-actions' 6 | 7 | module Builderator 8 | module Tasks 9 | ## 10 | # Wrap Berkshelf commands 11 | ## 12 | class Berkshelf < Thor 13 | include Thor::Actions 14 | 15 | class_option :debug, :type => :boolean, :desc => 'Show debug output' 16 | 17 | def self.exit_on_failure? 18 | true 19 | end 20 | 21 | desc 'configure', 'Write a Berksfile into the project workspace' 22 | def configure 23 | Interface.berkshelf.write 24 | end 25 | 26 | desc 'metadata COOKBOOK', 'Generate metadata.json from metadata.rb for a COOKBOOK that has a path' 27 | def metadata(cookbook) 28 | fail "Cookbook #{ cookbook } does not have a path!" unless Config.cookbook.depends.has?(cookbook) && 29 | !Config.cookbook.depends[cookbook].path.nil? 30 | 31 | cookbook_path = Config.cookbook.depends[cookbook].path 32 | metadata_rb = Chef::Cookbook::Metadata.new 33 | 34 | metadata_rb.from_file(::File.join(cookbook_path, 'metadata.rb')) 35 | 36 | say_status :metadata, "for cookbook #{ metadata_rb.name }@#{ metadata_rb.version }" 37 | create_file ::File.join(cookbook_path, 'metadata.json'), metadata_rb.to_json, :force => true 38 | end 39 | 40 | desc 'vendor', 'Vendor a cookbook release and its dependencies' 41 | def vendor 42 | invoke :configure, [], options 43 | empty_directory Interface.berkshelf.vendor 44 | 45 | command = "#{Interface.berkshelf.command} vendor #{Interface.berkshelf.vendor} " 46 | command << "-d " if options[:debug] 47 | command << "-c #{Interface.berkshelf.berkshelf_config} " unless Interface.berkshelf.berkshelf_config.nil? 48 | command << "-b #{Interface.berkshelf.source}" 49 | 50 | remove_file Interface.berkshelf.lockfile 51 | inside Interface.berkshelf.directory do 52 | run command 53 | end 54 | end 55 | 56 | desc 'upload', 'Upload the local cookbook source and its dependencies to the Chef server' 57 | def upload 58 | vendor 59 | 60 | command = "#{Interface.berkshelf.command} upload " 61 | command << "-d " if options[:debug] 62 | command << "-c #{Interface.berkshelf.berkshelf_config} " unless Interface.berkshelf.berkshelf_config.nil? 
63 | command << "-b #{Interface.berkshelf.source}" 64 | 65 | inside Interface.berkshelf.directory do 66 | run command 67 | end 68 | end 69 | 70 | desc 'uncache', 'Delete the Berkshelf cache' 71 | def uncache 72 | remove_dir File.join(ENV['HOME'], '.berkshelf/cookbooks') 73 | end 74 | 75 | desc 'clean', 'Remove a local vendor directory' 76 | def clean 77 | remove_dir Interface.berkshelf.vendor 78 | remove_file Interface.berkshelf.source 79 | remove_file Interface.berkshelf.lockfile 80 | end 81 | end 82 | end 83 | end 84 | -------------------------------------------------------------------------------- /lib/builderator/patch/thor-actions.rb: -------------------------------------------------------------------------------- 1 | require 'bundler' 2 | require 'childprocess' 3 | require 'thor/actions' 4 | 5 | require_relative '../util' 6 | 7 | class Thor 8 | ## 9 | # Patch some Thor actions 10 | ## 11 | module Actions 12 | ## 13 | # Replace `run` with IO::popen to accept STDIN 14 | ## 15 | def run_with_input(command, input, config = {}) 16 | return unless behavior == :invoke 17 | 18 | destination = relative_to_original_destination_root(destination_root, false) 19 | desc = "#{command} from #{destination.inspect}" 20 | 21 | if config[:with] 22 | desc = "#{File.basename(config[:with].to_s)} #{desc}" 23 | command = "#{config[:with]} #{command}" 24 | end 25 | 26 | say_status :run, desc, config.fetch(:verbose, true) 27 | return if options[:pretend] 28 | 29 | output = config.fetch(:stdout, STDOUT) 30 | 31 | IO.popen(command, 'r+') do |io| 32 | io.write(input) 33 | io.close_write 34 | 35 | ## Stream output 36 | loop do 37 | break if io.eof? 38 | 39 | output.write(io.readpartial(4096)) 40 | output.flush 41 | end 42 | end 43 | end 44 | 45 | ## 46 | # Run an external command without bundler's injected environment variables 47 | # (e.g. keep vagrant happy in it's own little vendor full of unicorns) 48 | ## 49 | def run_without_bundler(command, config = {}) 50 | destination = relative_to_original_destination_root(destination_root, false) 51 | desc = "#{command} from #{destination.inspect}" 52 | 53 | if config[:with] 54 | desc = "#{File.basename(config[:with].to_s)} #{desc}" 55 | command = "#{config[:with]} #{command}" 56 | end 57 | 58 | say_status :run, desc, config.fetch(:verbose, true) 59 | return if options[:pretend] 60 | 61 | output = config.fetch(:stdout, STDOUT) 62 | 63 | Bundler.with_clean_env do 64 | if config.fetch(:childprocess, false) 65 | process = ChildProcess.build(*command.split(' ')) 66 | process.io.inherit! 67 | 68 | process.start 69 | process.wait 70 | return process.exit_code 71 | end 72 | 73 | IO.popen(command, 'r+') do |io| 74 | ## Stream output 75 | loop do 76 | break if io.eof? 
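          ## Copy up to 4 KiB at a time from the child process to the configured output stream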
77 | 78 | output.write(io.readpartial(4096)) 79 | output.flush 80 | end 81 | end 82 | end 83 | end 84 | 85 | alias_method :thor_run, :run 86 | def run(command, config = {}) 87 | thor_run(command, config) 88 | fail "Command failed: #{command}" if $?.exitstatus != 0 89 | end 90 | 91 | ## 92 | # Make `template` load from a sane path and render in the context of Config 93 | ## 94 | def template(source, destination, config = {}) 95 | content = ERB.new(Builderator::Util.source_path(source).binread, 96 | nil, '-', '@output_buffer').result(Builderator::Config.instance_eval('binding')) 97 | 98 | create_file Builderator::Util.relative_path(destination), content, config 99 | end 100 | end 101 | end 102 | -------------------------------------------------------------------------------- /lib/builderator/model/cleaner/images.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require_relative '../../util' 3 | 4 | module Builderator 5 | module Model 6 | # :nodoc: 7 | module Cleaner 8 | def self.images 9 | @images ||= Images.new 10 | end 11 | 12 | ## 13 | # EC2 AMI Resources 14 | ## 15 | class Images < Model::Cleaner::Base 16 | PROPERTIES = %w(image_location state owner_id public architecture image_type 17 | name description root_device_type virtualization_type 18 | hypervisor) 19 | 20 | def fetch 21 | @resources = {}.tap do |i| 22 | Util.ec2.describe_images(:filters => [ 23 | { 24 | :name => 'state', 25 | :values => %w(available pending) 26 | } 27 | ], :owners => %w(self)).each do |page| 28 | page.images.each do |image| 29 | properties = Util.from_tags(image.tags) 30 | properties['creation_date'] = DateTime.iso8601(image.creation_date) 31 | PROPERTIES.each { |pp| properties[pp] = image[pp.to_sym] } 32 | 33 | i[image.image_id] = { 34 | :id => image.image_id, 35 | :properties => properties, 36 | :snapshots => image.block_device_mappings.map { |b| b.ebs.snapshot_id rescue nil }.reject(&:nil?), 37 | :parent => properties.fetch('parent_ami', '(undefined)') 38 | } 39 | end 40 | end 41 | end 42 | end 43 | 44 | def snapshots 45 | resources.values.map { |i| i[:snapshots] }.flatten 46 | end 47 | 48 | def latest 49 | {}.tap do |latest| 50 | ## Group images 51 | group_by = Config.cleaner.group_by 52 | groups = {}.tap do |object| 53 | break { 'all' => resources.values } if group_by.empty? 54 | 55 | resources.each do |_, image| 56 | ## Construct a grouping-key from image properties 57 | grouping_key = group_by.map do |grouping_property| 58 | image[:properties].fetch(grouping_property.to_s, '(unknown)') 59 | end.join(':') 60 | 61 | ## Create an array for the group if it doesn't already exist 62 | ## and add the image to it 63 | (object[grouping_key] ||= []) << image 64 | end 65 | end 66 | 67 | ## Sort each grouping 68 | groups.each do |_, group| 69 | group.sort! 
{ |a, b| b[:properties][Config.cleaner.sort_by] <=> a[:properties][Config.cleaner.sort_by] }
 70 | end
 71 | 
 72 | ## Slice to `keep` length
 73 | groups.each do |_, group|
 74 | group.slice!(Config.cleaner.keep..-1)
 75 | end
 76 | 
 77 | ## Reduce
 78 | groups.values.flatten.each { |i| latest[i[:id]] = i }
 79 | end
 80 | end
 81 | 
 82 | def in_use
 83 | {}.tap do |used|
 84 | used.merge!(select(Cleaner.instances.images))
 85 | used.merge!(select(Cleaner.launch_configs.images))
 86 | used.merge!(latest)
 87 | used.merge!(select(used.values.map { |i| i[:parent] }))
 88 | end
 89 | end
 90 | end
 91 | end
 92 | end
 93 | end
 94 | 
--------------------------------------------------------------------------------
/docs/versioning.md:
--------------------------------------------------------------------------------
 1 | Versioning
 2 | ==========
 3 | 
 4 | This functionality is currently supported for Git.
 5 | 
 6 | ## Version Bump Steps
 7 | 
 8 | * `major` - Increment major version.
 9 | * `major-prerelease` - Start a pre-release train from the next major version (increments major version).
 10 | * `minor` - Increment minor version.
 11 | * `minor-prerelease` - Start a pre-release train from the next minor version (increments minor version).
 12 | * `patch` - Increment the patch version.
 13 | * `patch-prerelease` - Force a new pre-release train from the next patch version, even if the current version is a pre-release.
 14 | * `release` - Release a pre-release train at the current `major`.`minor`.`patch` version.
 15 | * `prerelease NAME` - Create or increment a pre-release version. If the current version is not a pre-release, the patch version will also be incremented.
 16 | * `build` - Add or increment a build number.
 17 | 
 18 | Step types form an ordered set. Incrementing a higher step resets all lower-precedence parameters.
 19 | 
 20 | ## Commands
 21 | 
 22 | ### `build version current`
 23 | 
 24 | Searches for the newest SCM tag that is a valid semver string and writes it to VERSION in the project root.
 25 | 
 26 | ### `build version bump [STEP [PRERELEASE_NAME=alpha]]`
 27 | 
 28 | Increments the current version by the desired step and creates a new SCM tag at HEAD.
 29 | 
 30 | If STEP is omitted, Builderator will scan messages of commits between HEAD and the current version tag for hash-tag style annotations indicating how to increment the version, finally defaulting to a `patch` step if no annotations are found. If multiple `#STEP` annotations are detected, the largest (e.g. `#major` > `#patch`) step will be applied.
 31 | 
 32 | ## Configuration
 33 | 
 34 | The `autoversion` namespace has two attributes:
 35 | 
 36 | * `create_tags BOOLEAN` enables auto-generation of SCM tags after `bump` tasks. Default `false`.
 37 | * `search_tags BOOLEAN` enables detection of the current version from SCM tags. Default `true`.
 38 | 
 39 | ```ruby
 40 | autoversion do |version|
 41 | version.create_tags false
 42 | version.search_tags true
 43 | end
 44 | ```
 45 | 
 46 | ## Adding Providers
 47 | 
 48 | SCM providers must extend the `Builderator::Control::Version::SCM` module, and must implement two methods in their singleton class:
 49 | 
 50 | * `self._history` - Return an array of hashes with the following keys:
 51 | - `:id` SCM commit identity
 52 | - `:message` SCM commit message
 53 | - `:tags` `nil` or an array of semver strings
 54 | * `self.supported?` - Return `true` if the provider supports the build environment (e.g. the `Git` provider checks that `pwd` is a git repository), else return `false`.
 55 | 
 56 | To enable a provider module, pass it to `SCM.register`.
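As a rough illustration only (this `Subversion` provider is hypothetical and is not shipped with Builderator; the `.svn` check and the empty `_history` are placeholders), a provider skeleton might look like:

```ruby
require 'builderator/control/version/scm'

module Builderator
  module Control
    class Version
      ## Hypothetical example provider -- for illustration, not part of Builderator
      module Subversion
        extend SCM

        class << self
          ## Claim this build environment only when the working directory is an SVN checkout
          def supported?
            ::File.directory?('.svn')
          end

          ## Newest-first array of { :id, :message, :tags } hashes.
          ## A real provider would parse its SCM's log output here.
          def _history
            []
          end
        end
      end

      ## Make the provider available to SCM.provider lookups
      SCM.register(Subversion)
    end
  end
end
```

Note that the base `SCM` module already derives `_tags` from `_history`, so a provider only needs to override `_tags` when it has a cheaper way to enumerate tags.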
See [Builderator::Control::Version::Git](blob/auto-version/lib/builderator/control/version/git.rb) for an example. 57 | 58 | ## This looks like `thor-scmversion` 59 | 60 | _Why aren't you using `thor-scmversion`?!_ 61 | 62 | Well yes, it's based upon `thor-scmversion`, which I've been using for a while. `thor-scmversion` provides a nice model to ensure correct versioning of automatically built modules, but the project is largely abandoned, lacks some key features required for Builderator, and has a very inefficient access path for reading Git SCM data. 63 | 64 | This implementation adds `TYPE-prerelease` bump steps, improves semver matching regular-expressions, dramatically improves git-data access time for repositories with many tags (only reads from git-blobs once), 65 | and de-couples core functionality for parsing and incrementing versions from Thor tasks and actual mutation of the repository. 66 | -------------------------------------------------------------------------------- /lib/builderator/tasks/vendor.rb: -------------------------------------------------------------------------------- 1 | require 'pathname' 2 | require 'thor' 3 | 4 | require_relative '../patch/thor-actions' 5 | require_relative '../util' 6 | 7 | module Builderator 8 | module Tasks 9 | ## 10 | # Tasks to fetch and clean up remote artifacts 11 | ## 12 | class Vendor < Thor 13 | include Thor::Actions 14 | 15 | def self.exit_on_failure? 16 | true 17 | end 18 | 19 | desc 'all', 'Fetch all vendor sources' 20 | def all 21 | Config.vendor.each { |name, _| fetch(name) } 22 | end 23 | 24 | desc 'clean [NAME]', 'Clean up vendor directories' 25 | def clean(name = nil) 26 | ## Clean up all vendors 27 | return Config.vendor.each { |n, _| clean(n) } if name.nil? 28 | 29 | remove_dir Util.vendor(name) 30 | end 31 | 32 | desc 'fetch NAME', 'Fetch vendor NAME from its source' 33 | def fetch(name = :default) 34 | empty_directory Util::VENDOR 35 | 36 | path = Util.vendor(name) 37 | params = Config.vendor(name) 38 | 39 | if params.has?(:github) 40 | say_status :vendor, "#{ name } from GitHub repository #{ params.github }" 41 | _fetch_github(path, params) 42 | elsif params.has?(:git) 43 | say_status :vendor, "#{ name } from git repository #{ params.git }" 44 | _fetch_git(path, params) 45 | elsif params.has?(:path) 46 | say_status :vendor, "#{ name } from path #{ params.path }" 47 | _fetch_path(path, params) 48 | end 49 | 50 | ## Include any policies embedded in this vendor 51 | Config.recompile 52 | end 53 | 54 | no_commands do 55 | def _fetch_git(path, params) 56 | ## Ensure that there isn't already something there 57 | unless path.join('.git').exist? 58 | remove_dir path 59 | empty_directory path 60 | end 61 | 62 | inside path do 63 | ## Initialize new repository 64 | unless path.join('.git').exist? 
65 | run 'git init' 66 | run "git remote add #{ params.fetch(:remote, 'origin') } #{ params.git }" 67 | end 68 | 69 | run "git fetch #{ params.fetch(:remote, 'origin') } --tags --prune" 70 | 71 | ## Checkout reference 72 | if params.has?(:tag) then run "git checkout #{ params.tag }" 73 | elsif params.has?(:ref) then run "git checkout #{ params.ref }" 74 | else ## specified branch or master 75 | run "git checkout #{ params.fetch(:branch, 'master') }" 76 | 77 | ## Only pull if a tracking branch is checked out 78 | run "git pull #{ params.fetch(:remote, 'origin') } #{ params.fetch(:branch, 'master') }" 79 | end 80 | 81 | ## Apply relative subdirectory 82 | run "git filter-branch --subdirectory-filter \"#{ params.rel }\" --force" if params.has?(:rel) 83 | 84 | ## Update Submodules 85 | if path.join('.gitmodules').exist? 86 | run "git submodule update --init --recursive" 87 | end 88 | end 89 | end 90 | 91 | def _fetch_github(path, params) 92 | params.git = "git@github.com:#{ params.github }.git" 93 | _fetch_git(path, params) 94 | end 95 | 96 | def _fetch_path(path, params) 97 | remove_dir path.to_s if path.exist? 98 | create_link path.to_s, params.path.to_s 99 | end 100 | end 101 | end 102 | end 103 | end 104 | -------------------------------------------------------------------------------- /bin/build-clean: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'thor' 4 | 5 | require_relative '../lib/builderator/config' 6 | require_relative '../lib/builderator/control/cleaner' 7 | 8 | module Builderator 9 | module Tasks 10 | ## 11 | # Tasks to identify and remove unused EC2 resources 12 | ## 13 | class Cleaner < Thor 14 | class_option :region, 15 | :type => :string, 16 | :aliases => :r, 17 | :desc => 'AWS Region in which to perform tasks' 18 | class_option :commit, 19 | :type => :boolean, 20 | :default => false, 21 | :desc => 'Perform mutating API calls to cleanup resources' 22 | class_option :filter, 23 | :type => :array, 24 | :aliases => :f, 25 | :desc => 'Key/value pairs to filter resources (--filter name foo owner_id 123456789)' 26 | class_option :force, 27 | :type => :boolean, 28 | :default => false, 29 | :desc => 'Disable safety restrictions, including resource limits' 30 | 31 | class_option 'group-by', 32 | :type => :array, 33 | :desc => 'Tags/properties by which to group resources for pruning' 34 | class_option 'sort-by', 35 | :type => :string, 36 | :default => 'creation_date', 37 | :desc => 'Tag/property by which to sort grouped resources' 38 | class_option :keep, 39 | :type => :numeric, 40 | :default => 5, 41 | :desc => 'Number of resources in each group to keep' 42 | 43 | def initialize(*_) 44 | super 45 | 46 | ## Convert array of filter key-values to a hash 47 | options['filters'] = Hash[*options['filter']] if options['filter'].is_a?(Array) 48 | 49 | ## Load command flags 50 | Config.argv(:cleaner => options, :aws => { :region => options['region'] }) 51 | Config.load(File.join(ENV['HOME'], '.builderator/Buildfile')) 52 | Config.load(Util.relative_path('Buildfile').to_s) 53 | 54 | Config.recompile 55 | 56 | say_status 'dry-run', 'This is a dry-run.' 
unless Config.cleaner.commit 57 | end 58 | 59 | desc 'configs', 'Delete unused launch configurations' 60 | def configs 61 | Control::Cleaner.configs!(&method(:say_status)) 62 | end 63 | 64 | desc 'images', 'Deregister unused images' 65 | def images 66 | Control::Cleaner.images!(&method(:say_status)) 67 | end 68 | 69 | desc 'snapshots', 'Delete unused snapshots' 70 | def snapshots 71 | Control::Cleaner.snapshots!(&method(:say_status)) 72 | end 73 | 74 | desc 'volumes', 'Delete unused volumes' 75 | def volumes 76 | Control::Cleaner.volumes!(&method(:say_status)) 77 | end 78 | 79 | desc 'all', 'Cleaner volumes, launch configs, images, and snapshots in order' 80 | def all 81 | volumes 82 | configs 83 | images 84 | snapshots 85 | 86 | ## TODO Print resource counts here. 87 | return if Control::Cleaner.exceptions.empty? 88 | 89 | say_status :fail, 'Not all tasks completed successfully. The following '\ 90 | 'exceptions occured:', :red 91 | Control::Cleaner.exceptions.each do |e| 92 | say_status(*e.status) 93 | end 94 | 95 | ## Mark the Jenkins job as fail if there were errors. 96 | exit(1) 97 | end 98 | end 99 | end 100 | end 101 | 102 | Builderator::Tasks::Cleaner.start(ARGV) 103 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | lib = File.expand_path('../../lib', __FILE__) 2 | $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) 3 | 4 | require 'builderator/config' 5 | require 'builderator/util' 6 | 7 | module Builderator 8 | ## Hack everything into running in spec/resource 9 | module Util 10 | class << self 11 | def relative_path(*relative) 12 | relative = relative.flatten.map { |r| r.to_s } 13 | Pathname.new(__FILE__).join('../resource').join(*relative).expand_path 14 | end 15 | end 16 | end 17 | end 18 | 19 | RSpec.configure do |config| 20 | # rspec-expectations config goes here. You can use an alternate 21 | # assertion/expectation library such as wrong or the stdlib/minitest 22 | # assertions if you prefer. 23 | config.expect_with :rspec do |expectations| 24 | expectations.include_chain_clauses_in_custom_matcher_descriptions = true 25 | end 26 | 27 | # rspec-mocks config goes here. You can use an alternate test double 28 | # library (such as bogus or mocha) by changing the `mock_with` option here. 29 | config.mock_with :rspec do |mocks| 30 | mocks.verify_partial_doubles = true 31 | end 32 | 33 | # The settings below are suggested to provide a good initial experience 34 | # with RSpec, but feel free to customize to your heart's content. 35 | 36 | # rubocop:disable Style/BlockComments 37 | =begin 38 | # These two settings work together to allow you to limit a spec run 39 | # to individual examples or groups you care about by tagging them with 40 | # `:focus` metadata. When nothing is tagged with `:focus`, all examples 41 | # get run. 42 | config.filter_run :focus 43 | config.run_all_when_everything_filtered = true 44 | 45 | # Allows RSpec to persist some state between runs in order to support 46 | # the `--only-failures` and `--next-failure` CLI options. We recommend 47 | # you configure your source control system to ignore this file. 48 | config.example_status_persistence_file_path = "spec/examples.txt" 49 | 50 | # Limits the available syntax to the non-monkey patched syntax that is 51 | # recommended. 
For more details, see: 52 | # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ 53 | # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ 54 | # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode 55 | config.disable_monkey_patching! 56 | 57 | # This setting enables warnings. It's recommended, but in some cases may 58 | # be too noisy due to issues in dependencies. 59 | config.warnings = true 60 | 61 | # Many RSpec users commonly either run the entire suite or an individual 62 | # file, and it's useful to allow more verbose output when running an 63 | # individual spec file. 64 | if config.files_to_run.one? 65 | # Use the documentation formatter for detailed output, 66 | # unless a formatter has already been configured 67 | # (e.g. via a command-line flag). 68 | config.default_formatter = 'doc' 69 | end 70 | 71 | # Print the 10 slowest examples and example groups at the 72 | # end of the spec run, to help surface which specs are running 73 | # particularly slow. 74 | config.profile_examples = 10 75 | 76 | # Run specs in random order to surface order dependencies. If you find an 77 | # order dependency and want to debug it, you can fix the order by providing 78 | # the seed, which is printed after each run. 79 | # --seed 1234 80 | config.order = :random 81 | 82 | # Seed global randomization in this process using the `--seed` CLI option. 83 | # Setting this allows you to use `--seed` to deterministically reproduce 84 | # test failures related to randomization by passing the same `--seed` value 85 | # as the one that triggered the failure. 86 | Kernel.srand config.seed 87 | =end 88 | # rubocop:enable Style/BlockComments 89 | end 90 | -------------------------------------------------------------------------------- /lib/builderator/config/rash.rb: -------------------------------------------------------------------------------- 1 | require_relative './list' 2 | 3 | module Builderator 4 | module Config 5 | ## 6 | # A self-populating sparse Hash by Rapid7 ([R]apid7 h[ASH]). Definetly 7 | # not a Mash or Smash... 8 | ## 9 | class Rash < Hash 10 | class << self 11 | def coerce(somehting) 12 | return somehting if somehting.is_a?(self) 13 | return new(somehting) if somehting.is_a?(Hash) 14 | 15 | ## `somehting` is not a valid input. Just give back an instance. 
16 | new 17 | end 18 | end 19 | 20 | attr_accessor :sealed 21 | 22 | def initialize(from = {}, seal = false) 23 | @sealed = seal 24 | super() do |_, k| 25 | self[k] = self.class.new unless sealed 26 | end 27 | 28 | merge!(from) ## Clone a Rash or coerce a Hash to a new Rash 29 | end 30 | 31 | def clone 32 | self.class.new(self, sealed) 33 | end 34 | 35 | def seal(action = true) 36 | @sealed = action 37 | each_value { |v| v.seal(action) if v.is_a?(self.class) } 38 | end 39 | 40 | def unseal 41 | seal(false) 42 | end 43 | 44 | def has?(key, klass = BasicObject) 45 | include?(key) && fetch(key).is_a?(klass) 46 | end 47 | 48 | ## Symbolize keys 49 | [:include?, :[], :fetch, :[]=, :store].each do |m| 50 | define_method(m) do |key, *args| 51 | super(key.to_sym, *args) 52 | end 53 | end 54 | 55 | def merge!(other) 56 | fail TypeError, 'Argument other of `Rash#merge!(other)` must be a Hash.'\ 57 | " Recieved #{other.class}" unless other.is_a?(Hash) 58 | 59 | other.each_with_object({}) do |(k, v), diff| 60 | ## Replace `-`s with `_`s in in String keys 61 | k = k.gsub(/\-/, '_').to_sym if k.is_a?(String) 62 | 63 | next if has?(k) && self[k] == v 64 | 65 | ## Merge Arrays 66 | if v.is_a?(Array) 67 | self[k] = has?(k) ? Config::List.coerce(self[k]) : Config::List.new 68 | self[k].merge!(v) 69 | 70 | diff[k] = true 71 | next 72 | end 73 | 74 | ## Overwrite non-Hash values 75 | unless v.is_a?(Hash) 76 | self[k] = v 77 | 78 | diff[k] = true 79 | next 80 | end 81 | 82 | ## Merge recursivly coerces `v` to a Rash 83 | self[k] = self.class.coerce(self[k]) 84 | diff[k] = self[k].merge!(v) 85 | end 86 | end 87 | 88 | def diff(other) 89 | fail TypeError, 'Argument other of `Rash#diff(other)` must be a Hash.'\ 90 | " Recieved #{other.class}" unless other.is_a?(Hash) 91 | 92 | other.each_with_object({}) do |(k, v), diff| 93 | next if has?(k) && self[k] == v 94 | 95 | ## Merge Arrays 96 | if v.is_a?(Array) 97 | a = has?(k) ? 
Config::List.coerce(self[k]) : Config::List.new 98 | b = Config::List.coerce(v) 99 | 100 | diff[k] = { 101 | :+ => b - a, 102 | :- => a - b 103 | } 104 | 105 | next 106 | end 107 | 108 | ## Overwrite non-Hash values 109 | unless v.is_a?(Hash) 110 | diff[k] = { 111 | :+ => v, 112 | :- => fetch(k, nil) 113 | } 114 | 115 | next 116 | end 117 | 118 | diff[k] = self.class.coerce(fetch(k, {})).diff(self.class.coerce(v)) 119 | end 120 | end 121 | 122 | def to_hash 123 | each_with_object({}) do |(k, v), hash| 124 | ## Not a hash-value 125 | next hash[k] = v unless v.is_a?(self.class) 126 | 127 | ## Recursivly coerces `v` to a Hash 128 | hash[k] = v.to_hash 129 | end 130 | end 131 | end 132 | end 133 | end 134 | -------------------------------------------------------------------------------- /lib/builderator/control/data/image.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require 'date' 3 | 4 | require_relative '../../util' 5 | 6 | module Builderator 7 | module Control 8 | # :nodoc: 9 | module Data 10 | def self.image(query = {}) 11 | Image.search(query) 12 | end 13 | 14 | ## 15 | # Find AMI IDs to use for sources 16 | ## 17 | module Image 18 | ## Account IDs of public image owners 19 | OWNERS = { 20 | :self => 'self'.freeze, 21 | :ubuntu => '099720109477'.freeze, 22 | :amazon => 'amazon'.freeze, 23 | :marketplace => 'aws-marketplace'.freeze 24 | } 25 | 26 | ## Pre-defined filters 27 | FILTERS = { 28 | 'ubuntu-14.04-daily' => { 29 | 'owner' => OWNERS[:ubuntu], 30 | 'architecture' => 'x86_64', 31 | 'root-device-type' => 'ebs', 32 | 'virtualization-type' => 'hvm', 33 | 'block-device-mapping.volume-type' => 'gp2', 34 | 'name' => '*ubuntu-trusty-daily-amd64-server-201*' 35 | }, 36 | 'windows-server2012-r2' => { 37 | 'owner' => OWNERS[:amazon], 38 | 'architecture' => 'x86_64', 39 | 'root-device-type' => 'ebs', 40 | 'virtualization-type' => 'hvm', 41 | 'block-device-mapping.volume-type' => 'gp2', 42 | 'name' => 'Windows_Server-2012-R2_RTM-English-64Bit-Base*' 43 | }, 44 | 'windows-server2012-r2-core' => { 45 | 'owner' => OWNERS[:amazon], 46 | 'architecture' => 'x86_64', 47 | 'root-device-type' => 'ebs', 48 | 'virtualization-type' => 'hvm', 49 | 'block-device-mapping.volume-type' => 'gp2', 50 | 'name' => 'Windows_Server-2012-R2_RTM-English-64Bit-Core*' 51 | } 52 | } 53 | 54 | ## Filter fields defined in http://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Builderator::Util.ec2.html#describe_images-instance_method 55 | PROPERTIES = %w(architecture block-device-mapping.delete-on-termination 56 | block-device-mapping.device-name block-device-mapping.snapshot-id 57 | block-device-mapping.volume-size block-device-mapping.volume-type 58 | description hypervisor image-id image-type is-public kernel-id 59 | manifest-location name owner-alias owner-id platform product-code 60 | product-code.type ramdisk-id root-device-name root-device-type 61 | state state-reason-code state-reason-message virtualization-type).freeze 62 | 63 | class << self 64 | def search(query = {}) 65 | options = {} 66 | 67 | ## Reverse-merge a pre-defined filter into the query 68 | if query.include?('filter') 69 | query = FILTERS[query['filter']].merge(query) if FILTERS.include?(query['filter']) 70 | 71 | query.delete('filter') 72 | end 73 | 74 | options['image_ids'] = Util.to_array(query.delete('image_id')) if query.include?('image_id') 75 | options['owners'] = Util.to_array(query.delete('owner') { 'self' }) 76 | 77 | options['filters'] = query.each_with_object([]) do |(k, v), memo| 
78 | next if v.nil? 79 | 80 | ## Construct filter objects. Assume that non-enumerated keys are tags 81 | memo << { 82 | :name => PROPERTIES.include?(k.to_s) ? k.to_s : "tag:#{ k }", 83 | :values => Util.to_array(v) 84 | } 85 | end 86 | 87 | ## Don't send an empty filters array 88 | options.delete('filters') if options['filters'].empty? 89 | 90 | Util.ec2.describe_images(options) 91 | .each_with_object([]) { |page, images| images.push(*page.images) } 92 | .sort { |a, b| DateTime.iso8601(b.creation_date) <=> DateTime.iso8601(a.creation_date) } 93 | end 94 | end 95 | end 96 | end 97 | end 98 | end 99 | -------------------------------------------------------------------------------- /spec/config_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative './spec_helper' 2 | 3 | # :nodoc: 4 | module Builderator 5 | RSpec.describe Config, '#load' do 6 | before(:example) do 7 | Config.reset! 8 | Config.load(::File.expand_path('../resource/Buildfile', __FILE__)) 9 | end 10 | 11 | it 'defaults to the gecode solver' do 12 | Config.compile 13 | expect(Config.berkshelf.solver).to eq :gecode 14 | end 15 | 16 | it 'loads a DSL file' do 17 | expect(Config.layers.length).to eq 1 18 | end 19 | 20 | it 'compiles loaded DSL' do 21 | layer = Config.layers.first 22 | layer.compile.seal 23 | 24 | expect(layer.build_name).to eq 'builderator' 25 | expect(layer.autoversion.create_tags).to be false 26 | 27 | ## Collection `depends` in Namespace `cookbook` 28 | expect(layer.cookbook.depends['apt']).to be_kind_of(Config::Attributes::Namespace) 29 | expect(layer.cookbook.depends['etcd-v2']).to be_kind_of(Config::Attributes::Namespace) 30 | end 31 | 32 | it 'compiles configuration layers' do 33 | skip 34 | Config.compile 35 | 36 | ## Ensure that layer-order is respected 37 | expect(Config.aws.region).to eq 'us-east-1' 38 | expect(Config.build_name).to eq 'builderator' 39 | end 40 | end 41 | 42 | RSpec.describe Config, '#compile' do 43 | before(:example) do 44 | Builderator::Config::GLOBAL_DEFAULTS.reset! 45 | Builderator::Config.reset! 
46 | end 47 | 48 | it 'compiles a trivial build configuration' do 49 | expect(Builderator::Config.layers).to be_empty 50 | 51 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-home-directory', __FILE__)) 52 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-simple', __FILE__)) 53 | 54 | expect(Builderator::Config.all_layers.any?(&:dirty)).to be false 55 | 56 | expect { Config.compile }.not_to raise_error 57 | end 58 | 59 | it 'compiles a build with a build-type policy' do 60 | expect(Builderator::Config.layers).to be_empty 61 | 62 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-home-directory', __FILE__)) 63 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-with-policy', __FILE__)) 64 | 65 | expect(Builderator::Config.all_layers.any?(&:dirty)).to be false 66 | 67 | expect { Config.compile }.not_to raise_error 68 | end 69 | 70 | it 'compiles a build with a build-type policy and overrides' do 71 | expect(Builderator::Config.layers).to be_empty 72 | 73 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-home-directory', __FILE__)) 74 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-with-overrides', __FILE__)) 75 | 76 | expect(Builderator::Config.all_layers.any?(&:dirty)).to be false 77 | 78 | expect { Config.compile }.not_to raise_error 79 | end 80 | 81 | it 'compiles a build with a build-type policy and overrides, and an extended collection item with hash values' do 82 | expect(Builderator::Config.layers).to be_empty 83 | 84 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-home-directory', __FILE__)) 85 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-with-overrides2', __FILE__)) 86 | 87 | expect(Builderator::Config.all_layers.any?(&:dirty)).to be false 88 | 89 | expect { Config.compile }.not_to raise_error 90 | end 91 | 92 | it 'compiles a change after an initial compilation' do 93 | expect(Builderator::Config.layers).to be_empty 94 | 95 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-home-directory', __FILE__)) 96 | Builderator::Config.load(::File.expand_path('../resource/Buildfile-with-overrides2', __FILE__)) 97 | expect(Builderator::Config.all_layers.any?(&:dirty)).to be false 98 | expect { Config.compile }.not_to raise_error 99 | 100 | Builderator::Config.defaults.version '1.2.3' 101 | Builderator::Config.defaults.build_number = '4' 102 | expect { Config.compile }.not_to raise_error 103 | 104 | expect(Builderator::Config.profile('default').tags['version']).to eq '1.2.3-4' 105 | end 106 | end 107 | end 108 | -------------------------------------------------------------------------------- /spec/interface_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative './spec_helper' 2 | 3 | require 'builderator/config' 4 | require 'builderator/interface/berkshelf' 5 | require 'builderator/interface/packer' 6 | require 'builderator/interface/vagrant' 7 | 8 | # :nodoc: 9 | module Builderator 10 | RSpec.describe Interface do 11 | context 'Berksfile' do 12 | berkshelf = Interface.berkshelf 13 | 14 | it 'loads from Config values' do 15 | expect(berkshelf.vendor).to eq Config.local.cookbook_path 16 | end 17 | 18 | it 'generates the correct Berksfile' do 19 | skip 20 | expect(berkshelf.render).to eq IO.read(::File.expand_path('../data/Berksfile', __FILE__)) 21 | end 22 | end 23 | 24 | context 'Vagrantfile' do 25 | # vagrant = Interface.vagrant(:default) 26 | 27 | it 'loads from 
Config values' do 28 | skip 29 | expect(vagrant.build_name).to eq Config.build_name 30 | end 31 | 32 | it 'generates the correct Vagrantfile' do 33 | skip 34 | pending "test doesn't work with absolute paths" 35 | expect(vagrant.render).to eq IO.read(::File.expand_path('../data/Vagrantfile', __FILE__)) 36 | end 37 | end 38 | 39 | context 'Packer post-processors' do 40 | before(:example) do 41 | Config.reset! 42 | Config.load(::File.expand_path('../resource/Buildfile-with-post-processors', __FILE__)) 43 | Config.compile 44 | end 45 | 46 | it 'generates a single post-processor' do 47 | Config.profile.use('single') 48 | packer = Interface::Packer.new 49 | expect(packer.packerfile['post-processors']).to eq ['docker-push'] 50 | end 51 | 52 | it 'generates a complex post-processor' do 53 | Config.profile.use('complex') 54 | packer = Interface::Packer.new 55 | expect(packer.packerfile['post-processors']).to eq [{ 56 | :type => 'docker-tag', 57 | :repository => 'rapid7/builderator', 58 | :tag => 'latest' 59 | }] 60 | end 61 | 62 | it 'generates a sequence of post-processors' do 63 | Config.profile.use('sequence') 64 | packer = Interface::Packer.new 65 | expect(packer.packerfile['post-processors']).to eq [ 66 | [ 67 | { 68 | :type => 'docker-tag', 69 | :repository => 'rapid7/builderator', 70 | :tag => 'latest' 71 | }, 72 | 'docker-push' 73 | ] 74 | ] 75 | end 76 | 77 | it 'generates multiple sequences of post-processors' do 78 | Config.profile.use('multiple_sequences') 79 | packer = Interface::Packer.new 80 | expect(packer.packerfile['post-processors']).to eq [ 81 | [ 82 | { 83 | :type => 'docker-tag', 84 | :repository => 'rapid7/builderator', 85 | :tag => '1.2.2' 86 | }, 87 | 'docker-push' 88 | ], 89 | [ 90 | { 91 | :type => 'docker-tag', 92 | :repository => 'rapid7/builderator', 93 | :tag => 'latest' 94 | }, 95 | 'docker-push' 96 | ] 97 | ] 98 | end 99 | end 100 | 101 | context 'Packer block-device-mappings' do 102 | before(:example) do 103 | Config.reset! 
104 | Config.load(::File.expand_path('../resource/Buildfile-with-block-device-mappings', __FILE__)) 105 | Config.compile 106 | end 107 | 108 | it 'generates an AMI block device mapping' do 109 | Config.profile.use('ami_mappings') 110 | packer = Interface::Packer.new 111 | mappings = packer.packerfile[:builders].first[:ami_block_device_mappings] 112 | expect(mappings).to eq [{ 113 | 'device_name' => '/dev/sda', 114 | 'no_device' => true, 115 | }] 116 | end 117 | 118 | it 'generates a Packer launch block device mapping' do 119 | Config.profile.use('launch_mappings') 120 | packer = Interface::Packer.new 121 | mappings = packer.packerfile[:builders].first[:launch_block_device_mappings] 122 | expect(mappings).to eq [{ 123 | 'device_name' => '/dev/sda', 124 | 'no_device' => true, 125 | }] 126 | end 127 | end 128 | end 129 | end 130 | -------------------------------------------------------------------------------- /lib/builderator/control/version.rb: -------------------------------------------------------------------------------- 1 | require_relative './version/auto' 2 | require_relative './version/bump' 3 | require_relative './version/comparable' 4 | require_relative './version/scm' 5 | require_relative './version/git' 6 | 7 | module Builderator 8 | module Control 9 | ## 10 | # Version management tools 11 | # 12 | # Initial version boosted shamelessly from 13 | # https://github.com/RiotGamesMinions/thor-scmversion 14 | ## 15 | class Version 16 | FORMAT = /(?\d+)\.(?\d+)\.(?\d+)(?-(?[A-Za-z0-9]+)\.(?\d+))?(?:\+build\.(?\d+))?$/ 17 | DEFAULT_PRERELEASE_NAME = 'alpha'.freeze 18 | 19 | ## Order of precedence for release types 20 | RELEASE_TYPES = { 21 | 'major' => 0, 22 | 'major-prerelease' => 5, 23 | 'minor' => 10, 24 | 'minor-prerelease' => 15, 25 | 'patch' => 20, 26 | 'patch-prerelease' => 25, 27 | 'release' => 30, 28 | 'prerelease' => 35, 29 | 'build' => 40 30 | } 31 | 32 | class << self 33 | def current 34 | @current ||= SCM.tags.last 35 | 36 | if @current.nil? && Util.relative_path('VERSION').exist? 37 | @current = Version.from_string(Util.relative_path('VERSION').read) 38 | end 39 | 40 | if @current.nil? 41 | fail 'No current version found! Create a VERSION file or set a version tag in your SCM.' 42 | end 43 | 44 | @current 45 | end 46 | 47 | def set_config_version 48 | Config.defaults.version = current.to_s 49 | Config.recompile 50 | end 51 | 52 | def write 53 | current.write 54 | end 55 | 56 | ## 57 | # Alias `bump` to the current version 58 | ## 59 | def bump(type = nil, prerelease_name = nil) 60 | @current = current.clone 61 | 62 | current.bump(type, prerelease_name) 63 | SCM.tags << current 64 | 65 | current 66 | end 67 | 68 | ## Parse a SemVer string into a Version 69 | def from_string(arg, options = {}) 70 | matchdata = arg.match(FORMAT) 71 | return nil if matchdata.nil? 72 | 73 | new(matchdata[:major], matchdata[:minor], matchdata[:patch], matchdata[:build], options).tap do |version| 74 | version.is_prerelease = !matchdata[:prerelease].nil? 75 | if version.is_prerelease 76 | version.prerelease_name = matchdata[:prerelease_name] 77 | version.prerelease_iteration = matchdata[:prerelease_iteration].to_i 78 | end 79 | end 80 | end 81 | end 82 | 83 | def initialize(major, minor, patch, build = nil, **options) 84 | @major = major.to_i 85 | @minor = minor.to_i 86 | @patch = patch.to_i 87 | @build = build.to_i unless build.nil? 
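        ## `build` stays nil when absent so #to_s omits the "+build.N" suffix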
88 | 89 | @ref = options[:ref] 90 | end 91 | 92 | include Auto 93 | include Bump 94 | include Comparable 95 | 96 | attr_accessor :ref 97 | 98 | attr_accessor :major 99 | attr_accessor :minor 100 | attr_accessor :patch 101 | 102 | attr_accessor :is_prerelease 103 | attr_accessor :prerelease_name 104 | attr_accessor :prerelease_iteration 105 | 106 | attr_accessor :build 107 | 108 | ## Create or bump a new prerelease train 109 | def prerelease(name = nil) 110 | self.build = nil ## Reset the build counter 111 | 112 | ## Increment current prerelease train 113 | if is_prerelease && (name.nil? || name == prerelease_name) 114 | self.prerelease_iteration += 1 115 | return self 116 | end 117 | 118 | ## New prerelease train 119 | self.is_prerelease = true 120 | self.prerelease_name = name.nil? ? DEFAULT_PRERELEASE_NAME : name 121 | self.prerelease_iteration = 0 122 | 123 | self 124 | end 125 | 126 | def write 127 | Util.relative_path('VERSION').write(to_s) 128 | end 129 | 130 | def to_s 131 | string = [major, minor, patch].join('.') 132 | string << "-#{prerelease_name}.#{prerelease_iteration}" if is_prerelease 133 | string << "+build.#{build}" unless build.nil? 134 | string 135 | end 136 | end 137 | end 138 | end 139 | -------------------------------------------------------------------------------- /lib/builderator/config.rb: -------------------------------------------------------------------------------- 1 | require_relative './config/file' 2 | require_relative './config/defaults' 3 | 4 | module Builderator 5 | ## 6 | # Global Configuration 7 | ## 8 | module Config 9 | class << self 10 | ## GLOBAL_DEFAULTS is the lowest-precedence layer, followed by dynamically 11 | ## defined instance-defaults. 12 | def layers 13 | @layers ||= [] 14 | end 15 | 16 | def all_layers 17 | ([GLOBAL_DEFAULTS, defaults] + layers + [overrides, argv]) 18 | end 19 | 20 | def defaults 21 | @defaults ||= File.new({}, :source => 'defaults') 22 | end 23 | 24 | def overrides 25 | @overrides ||= File.new({}, :source => 'overrides') 26 | end 27 | 28 | def argv(options = {}) 29 | @argv ||= File.new(options, :source => 'argv') 30 | end 31 | 32 | def append(path) 33 | layers << File.from_file(path) if ::File.exist?(path) 34 | end 35 | alias_method :load, :append 36 | 37 | def append_json(path) 38 | layers << File.from_json(path) if ::File.exist?(path) 39 | end 40 | alias_method :load_json, :append_json 41 | 42 | def prepend(path) 43 | layers.unshift(File.from_file(path)) if ::File.exist?(path) 44 | end 45 | 46 | def prepend_json(path) 47 | layers.unshift(File.from_json(path)) if ::File.exist?(path) 48 | end 49 | 50 | ## 51 | # The compile method renders a single File instance from all of the configured 52 | # input layers. It follows the following algorithm: 53 | # 54 | # => `DIRTY` is defined as the logical OR of the dirty state of each layer. 55 | # Layers are responsible for detecting changes to their own properties 56 | # while being compiled. 57 | # 58 | # => LOOP unitl not DIRTY plus 1 iteration 59 | # 1. Call each layer's own compile method. 60 | # 2. For each layer, merge it into the COMPILED output. 61 | # FAIL if ITERATIONS > LIMIT 62 | # 63 | # => The additional iteration after DIRTY becomes false is to ensure that 64 | # any changes to the compiled output during the final merge are passed 65 | # back through each layer's compile. 
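      #
      # Illustration (hypothetical values; this mirrors how bin/build-clean layers its config):
      #
      #   Config.load(File.join(ENV['HOME'], '.builderator/Buildfile'))  # user-level layer
      #   Config.load(Util.relative_path('Buildfile').to_s)              # project layer (higher precedence)
      #   Config.argv(:aws => { :region => 'us-east-1' })                # CLI flags merge last
      #   Config.compile                                                 # iterate until no layer is dirty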
66 | ## 67 | def compile(max_iterations = 6) 68 | compiled.unseal 69 | compile_iterations = 0 70 | break_break = false 71 | 72 | ## Inject GLOBAL_DEFAULTS before starting compile 73 | compiled.merge(GLOBAL_DEFAULTS.compile) 74 | 75 | ## Automatically recompile while layers are dirty 76 | loop do 77 | fail "Re-compile iteration limit of #{max_iterations} has been exceeded. "\ 78 | "#{all_layers.select(&:dirty).map(&:source).join(', ')} are dirty." if compile_iterations >= max_iterations 79 | 80 | ## Merge layers from lowest to highest. Compile, then merge. 81 | all_layers.each do |layer| 82 | layer.compile 83 | end 84 | 85 | all_layers.each do |layer| 86 | layer.policies.each { |_, policy| compiled.merge(policy) } 87 | 88 | ## Merge layer after its policy documents to allow overides 89 | compiled.merge(layer) 90 | end 91 | 92 | break if break_break && !dirty? 93 | 94 | break_break = !dirty? 95 | compile_iterations += 1 96 | end 97 | 98 | ## Don't auto-populate keys anymore 99 | compiled.seal 100 | end 101 | alias_method :recompile, :compile 102 | 103 | def dirty? 104 | all_layers.any?(&:dirty) 105 | end 106 | 107 | def compiled 108 | @compiled ||= File.new({}, :source => 'compiled') 109 | end 110 | 111 | def reset! 112 | @layers = [] 113 | 114 | @defaults = File.new({}, :source => 'defaults') 115 | @overrides = File.new({}, :source => 'overrides') 116 | @argv = File.new({}, :source => 'argv') 117 | 118 | @compiled = File.new({}, :source => 'compiled') 119 | end 120 | 121 | def fetch(key, *args) 122 | compiled.send(key, *args) 123 | end 124 | alias_method :[], :fetch 125 | 126 | def method_missing(method_name, *args) 127 | return super unless compiled.respond_to?(method_name) 128 | 129 | compiled.send(method_name, *args) 130 | end 131 | end 132 | end 133 | end 134 | -------------------------------------------------------------------------------- /template/Vagrantfile.erb: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | Vagrant.configure('2') do |config| 5 | config.berkshelf.enabled = false if Vagrant.has_plugin? 
'vagrant-berkshelf' 6 | 7 | config.vm.hostname = 'builderator-<%= build_name %>' 8 | 9 | ## Local Provider 10 | config.vm.provider '<%= profile.current.vagrant.local.provider %>' do |local, override| 11 | local.memory = <%= profile.current.vagrant.local.memory %> 12 | local.cpus = <%= profile.current.vagrant.local.cpus %> 13 | 14 | override.vm.box = '<%= profile.current.vagrant.local.box %>' 15 | <% if profile.current.vagrant.local.has?(:box_url) -%> 16 | override.vm.box_url = '<%= profile.current.vagrant.local.box_url %>' 17 | <% end -%> 18 | end 19 | 20 | ## EC2 Provider 21 | config.vm.provider '<%= profile.current.vagrant.ec2.provider %>' do |ec2, override| 22 | override.vm.box = '<%= profile.current.vagrant.ec2.box %>' 23 | override.vm.box_url = '<%= profile.current.vagrant.ec2.box_url %>' 24 | override.ssh.username = '<%= profile.current.vagrant.ec2.ssh_username %>' 25 | 26 | ec2.region = '<%= profile.current.vagrant.ec2.region %>' 27 | ec2.subnet_id = '<%= profile.current.vagrant.ec2.subnet_id %>' 28 | 29 | ## VPN Connected VPC 30 | ec2.associate_public_ip = <%= profile.current.vagrant.ec2.associate_public_ip %> 31 | ec2.ssh_host_attribute = :<%= profile.current.vagrant.ec2.ssh_host_attribute %> 32 | 33 | ec2.instance_type = '<%= profile.current.vagrant.ec2.instance_type %>' 34 | ec2.security_groups = <%= profile.current.vagrant.ec2.security_groups %> 35 | ec2.iam_instance_profile_arn = '<%= profile.current.vagrant.ec2.iam_instance_profile_arn %>' 36 | 37 | ec2.ami = '<%= profile.current.vagrant.ec2.source_ami %>' 38 | ec2.tags = <%= profile.current.vagrant.ec2.tags %> 39 | end 40 | 41 | <% profile.current.vagrant.port.each do |name, port| -%> 42 | config.vm.network :forwarded_port, :host => <%= port.host %>, :guest => <%= port.guest %> # <%= name %> 43 | <% end -%> 44 | 45 | <% profile.current.vagrant.sync.each do |name, sync| -%> 46 | config.vm.synced_folder "<%= sync.path %>", "<%= sync.destination %>" # <%= name %> 47 | <% end -%> 48 | 49 | ## 50 | # Use the standard data directory for Chef 51 | ## 52 | config.vm.provision :shell, 53 | :inline => "sudo mkdir -p <%= chef.staging_directory %>/cache && "\ 54 | "sudo chown $(whoami) -R <%= chef.staging_directory %>", 55 | :privileged => false 56 | 57 | ## 58 | # Sync build artifacts to the VM 59 | ## 60 | <% profile.current.artifact.each do |name, artifact| -%> 61 | config.vm.provision :file, 62 | :source => '<%= artifact.path %>', 63 | :destination => '<%= artifact.destination %>' 64 | <% end -%> 65 | 66 | config.omnibus.chef_version = '<%= chef.version %>' 67 | config.vm.provision :chef_solo do |chef| 68 | chef.log_level = :<%= chef.log_level %> 69 | 70 | chef.cookbooks_path = '<%= local.cookbook_path %>' 71 | <% unless local.data_bag_path.nil? -%> 72 | chef.data_bags_path = '<%= local.data_bag_path %>' 73 | <% end -%> 74 | <% unless local.environment_path.nil? -%> 75 | chef.environments_path = '<%= local.environment_path %>' 76 | <% end -%> 77 | chef.provisioning_path = '<%= chef.staging_directory %>' 78 | 79 | chef.run_list = <%= profile.current.chef.run_list %> 80 | <% unless profile.current.chef.environment.nil? -%> 81 | chef.environment = '<%= profile.current.chef.environment %>' 82 | <% end -%> 83 | 84 | <% unless profile.current.chef.binary_env.nil? -%> 85 | chef.binary_env = '<%= profile.current.chef.binary_env %>' 86 | <% end -%> 87 | 88 | <% if profile.current.chef.node_attrs.nil? 
-%> 89 | chef.json = {} 90 | <% else -%> 91 | chef.json = <%= profile.current.chef.node_attrs %> 92 | <% end -%> 93 | 94 | ## Tell Chef that this is a Vagrant build 95 | chef.json[:vagrant] = true 96 | end 97 | 98 | <%- profile.current.provisioner.each do |name, provisioner| -%> 99 | <%- provisioner.each do |func, val| -%> 100 | <%- if val.is_a?(Array) -%> 101 | <%- val.each do |v| -%> 102 | config.vm.provision "<%= name %>", <%= func %>: "<%= v %>" 103 | <%- end -%> 104 | <%- else -%> 105 | config.vm.provision "<%= name %>", <%= func %>: "<%= val %>" 106 | <%- end -%> 107 | <%- end -%> 108 | <%- end -%> 109 | end 110 | -------------------------------------------------------------------------------- /lib/builderator/tasks/cookbook.rb: -------------------------------------------------------------------------------- 1 | # require 'chef' 2 | # require 'chef/cookbook_site_streaming_uploader' 3 | # require 'rubygems/package' 4 | # require 'thor' 5 | # require 'thor/actions' 6 | # require 'thor-scmversion' 7 | # require 'zlib' 8 | # 9 | # require_relative '../util' 10 | # require_relative '../util/cookbook' 11 | # 12 | # module Builderator 13 | # module Tasks 14 | # class Cookbook < Thor 15 | # include Thor::Actions 16 | # class_option :version, :type => :boolean, 17 | # :default => true, 18 | # :desc => 'Write current verison to file' 19 | # 20 | # desc 'metadata [PATH]', 'Use cookbook matadata file at PATH/metadata.rb to generate PATH/matadata.json' 21 | # def metadata(cookbook = nil) 22 | # Util::Cookbook.path(cookbook) unless cookbook.nil? 23 | # metadata = Util::Cookbook.metadata 24 | # 25 | # invoke 'version:current', [], options if options['version'] 26 | # say_status :metadata, "for cookbook #{ metadata.name }@#{ metadata.version }" 27 | # create_file Util::Cookbook.path.join('metadata.json').to_s, metadata.to_json, :force => true 28 | # 29 | # metadata 30 | # end 31 | # 32 | # desc 'build PATH', 'Package cookbook at PATH into a tarball' 33 | # def build(cookbook = nil) 34 | # Util::Cookbook.path(cookbook) unless cookbook.nil? 35 | # 36 | # ## Generate metadata.json 37 | # metadata = invoke(Tasks::Cookbook, :metadata, [], options) 38 | # 39 | # ## Create a gzipped tarball and add cookbook files to it. We avoid 40 | # ## buffering this in memory (e.g. using StringIO) at all cost 41 | # ## to keep large files from gumming things up. 
42 | # say_status :package, "cookbook into #{ metadata.archive }" 43 | # metadata.archive.open('wb') do |package| 44 | # Zlib::GzipWriter.wrap(package) do |gz| 45 | # Gem::Package::TarWriter.new(gz) do |tar| 46 | # metadata.files.each do |f| 47 | # f_stat = File.stat(f) 48 | # 49 | # ## Add directories 50 | # next tar.mkdir(Util::Cookbook.archive_path(metadata, f).to_s, f_stat.mode) if File.directory?(f) 51 | # 52 | # ## Add files 53 | # tar.add_file_simple(Util::Cookbook.archive_path(metadata, f).to_s, f_stat.mode, f_stat.size) do |entry| 54 | # f.open('rb') { |h| entry.write(h.read) } 55 | # end 56 | # end 57 | # end 58 | # end 59 | # end 60 | # 61 | # metadata 62 | # end 63 | # 64 | # desc 'push PATH', 'Publish cookbook at PATH to supermarket.chef.io' 65 | # option 'chef-config', :type => :string, 66 | # :aliases => :c, 67 | # :default => File.join(ENV['HOME'], '.chef/knife.rb') 68 | # option :site, :type => :string, :aliases => :s 69 | # option :user, :type => :string, :aliases => :u 70 | # option :key, :type => :string, :aliases => :k 71 | # def push(cookbook = nil) 72 | # Chef::Config.from_file(options['chef-config']) 73 | # Util::Cookbook.path(cookbook) unless cookbook.nil? 74 | # 75 | # ## Set defaults after Chef::Config is loaded 76 | # options['site'] ||= Chef::Config.knife['supermarket_site'] || 'https://supermarket.chef.io/' 77 | # options['user'] ||= Chef::Config.knife['supermarket_user'] || Chef::Config.node_name 78 | # options['key'] ||= Chef::Config.knife['supermarket_key'] || Chef::Config.client_key 79 | # 80 | # ## Build the cookbook taball 81 | # metadata = invoke(Tasks::Cookbook, :build, [cookbook], options) 82 | # say_status :upload, "cookbook #{ metadata.name }@#{ metadata.version } to #{ options['site'] }" 83 | # 84 | # metadata.archive.open('rb') do |c| 85 | # http_resp = Chef::CookbookSiteStreamingUploader.post( 86 | # File.join(options['site'], '/api/v1/cookbooks'), 87 | # options['user'], 88 | # options['key'], 89 | # :tarball => c, 90 | # :cookbook => { :category => '' }.to_json 91 | # ) 92 | # 93 | # if http_resp.code.to_i != 201 94 | # say_status :error, "Error uploading cookbook: #{ http_resp.code } #{ http_resp.message }", :red 95 | # say http_resp.body 96 | # exit(1) 97 | # end 98 | # end 99 | # end 100 | # 101 | # desc 'version COOKBOOK', 'Print the current version of a vendored cookbook' 102 | # option :path, :default => Util::Cookbook::DEFAULT_VENDOR, :desc => 'Path to vendored cookbooks' 103 | # def version(cookbook) 104 | # Util::Cookbook.path(File.join(options['path'], cookbook)) 105 | # puts Util::Cookbook.metadata.version 106 | # end 107 | # end 108 | # end 109 | # end 110 | -------------------------------------------------------------------------------- /lib/builderator/control/cleaner.rb: -------------------------------------------------------------------------------- 1 | require_relative '../model/cleaner' 2 | require_relative '../util' 3 | 4 | module Builderator 5 | module Control 6 | ## 7 | # Control logic for cleanup tasks 8 | ## 9 | module Cleaner 10 | class << self 11 | def configs! 12 | resources = Model::Cleaner.launch_configs.unused 13 | 14 | yield :launch_configs, "Found #{resources.length} Launch Configurations to remove" 15 | verify!(:launch_configs, 'Cleanup Launch Configurations', resources, &Proc.new) 16 | aborted!(&Proc.new) 17 | 18 | resources.keys.sort.each do |id| 19 | yield :remove, "Launch Configuration #{id}", :red 20 | Model::Cleaner.launch_configs.resources.delete(id) 21 | 22 | next unless commit? 
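          ## `commit?` is true only when `--commit` was given and no safety abort is pending;
          ## on a dry-run the destructive API call below is skipped.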
23 | Util.asg.delete_launch_configuration(:launch_configuration_name => id) 24 | end 25 | rescue Aws::AutoScaling::Errors::ServiceError => e 26 | exceptions << Util::AwsException.new('Cleanerup Launch Configurations', e) 27 | yield(*exceptions.last.status) 28 | end 29 | 30 | def images! 31 | resources = Model::Cleaner.images.unused 32 | 33 | yield :images, "Found #{resources.length} Images to remove" 34 | yield :grouping, "Groupd images by #{Config.cleaner.group_by}" if Config.cleaner.group_by 35 | yield :keep, "Keeping #{Config.cleaner.keep} images in each group" 36 | verify!(:images, 'Cleanup Images', resources, &Proc.new) 37 | aborted!(&Proc.new) 38 | 39 | resources.values 40 | .sort { |a, b| a[:properties]['name'] <=> b[:properties]['name'] } 41 | .each do |image| 42 | yield :remove, "Image #{image[:id]} (#{image[:properties]['name']})", :red 43 | Model::Cleaner.images.resources.delete(image[:id]) 44 | 45 | next unless commit? 46 | Util.ec2.deregister_image(:image_id => image[:id]) 47 | end 48 | 49 | rescue Aws::EC2::Errors::ServiceError => e 50 | exceptions << Util::AwsException.new('Cleanerup Images', e) 51 | yield(*exceptions.last.status) 52 | end 53 | 54 | def snapshots! 55 | resources = Model::Cleaner.snapshots.unused 56 | 57 | yield :snapshots, "Found #{resources.length} Snapshots to remove" 58 | verify!(:snapshots, 'Cleanup Snapshots', resources, &Proc.new) 59 | aborted!(&Proc.new) 60 | 61 | resources.keys.sort.each do |id| 62 | yield :remove, "Snapshot #{id}", :red 63 | Model::Cleaner.snapshots.resources.delete(id) 64 | 65 | next unless commit? 66 | Util.ec2.delete_snapshot(:snapshot_id => id) 67 | end 68 | rescue Aws::EC2::Errors::ServiceError => e 69 | exceptions << Util::AwsException.new('Cleanerup Snapshots', e) 70 | yield(*exceptions.last.status) 71 | end 72 | 73 | def volumes! 74 | resources = Model::Cleaner.volumes.unused 75 | 76 | yield :volumes, "Found #{resources.length} Volumes to remove" 77 | verify!(:volumes, 'Cleanup Volumes', resources, &Proc.new) 78 | aborted!(&Proc.new) 79 | 80 | resources.keys.sort.each do |id| 81 | yield :remove, "Volume #{id}", :red 82 | Model::Cleaner.volumes.resources.delete(id) 83 | 84 | next unless commit? 85 | Util.ec2.delete_volume(:volume_id => id) 86 | end 87 | rescue Aws::EC2::Errors::ServiceError => e 88 | exceptions << Util::AwsException.new('Cleanerup Volumes', e) 89 | yield(*exceptions.last.status) 90 | end 91 | 92 | def commit? 93 | Config.cleaner.commit && !@abort 94 | end 95 | 96 | def aborted? 97 | Config.cleaner.commit && @abort 98 | end 99 | 100 | def exceptions 101 | @exceptions ||= [] 102 | end 103 | 104 | private 105 | 106 | def aborted! 107 | yield :aborted, 'The following resources will NOT be removed because'\ 108 | ' safty constraints have not been met!', :yellow if aborted? 109 | end 110 | 111 | def verify!(resource_name, task, resources) 112 | if Config.cleaner.commit 113 | yield :commit, 'This is not a dry-run. Press CTL-C to stop! '\ 114 | '(continuing in 5 seconds)', :red 115 | 116 | sleep(5) ## Give $USER a few seconds to stop 117 | end 118 | 119 | return unless resources.size >= Config.cleaner.limits[resource_name] 120 | 121 | ex = Util::LimitException.new(resource_name, task, resources) 122 | yield(*ex.status) 123 | 124 | if Config.cleaner.force 125 | yield :force, 'Limits will be ignored. Press CTL-C to stop! 
'\ 126 | '(continuing in 5 seconds)', :red 127 | sleep(5) ## Give $USER a few seconds to stop 128 | 129 | return 130 | end 131 | 132 | exceptions << ex 133 | @abort = true 134 | end 135 | end 136 | end 137 | end 138 | end 139 | -------------------------------------------------------------------------------- /lib/builderator/util.rb: -------------------------------------------------------------------------------- 1 | require 'pathname' 2 | 3 | module Builderator 4 | ## 5 | # Shared helper methods 6 | ## 7 | module Util 8 | GEM_PATH = Pathname.new(__FILE__).join('../../..').expand_path 9 | WORKSPACE = '.builderator'.freeze 10 | VENDOR = 'vendor'.freeze 11 | 12 | class << self 13 | ## 14 | # Transform helpers 15 | ## 16 | def to_array(arg) 17 | arg.is_a?(Array) ? arg : [arg] 18 | end 19 | 20 | def from_tags(aws_tags) 21 | {}.tap { |tt| aws_tags.each { |t| tt[t.key.to_s] = t.value } } 22 | end 23 | 24 | ## 25 | # Relative path from working directory 26 | ## 27 | def relative_path(*relative) 28 | Pathname.pwd.join(*(relative.flatten.map(&:to_s))).expand_path 29 | end 30 | 31 | def workspace(*relative) 32 | relative_path(WORKSPACE, relative) 33 | end 34 | 35 | def vendor(*relative) 36 | workspace(VENDOR, relative) 37 | end 38 | 39 | def source_path(*relative) 40 | GEM_PATH.join(*(relative.flatten.map(&:to_s))).expand_path 41 | end 42 | 43 | ## 44 | # Set-filter helpers 45 | ## 46 | def filter(resources, filters = {}) 47 | resources.select do |_, r| 48 | _filter_reduce(r, filters) 49 | end 50 | end 51 | 52 | def filter!(resources, filters = {}) 53 | resources.select! do |_, r| 54 | _filter_reduce(r, filters) 55 | end 56 | 57 | resources 58 | end 59 | 60 | ## 61 | # AWS Clients 62 | ## 63 | def ec2(region = Config.aws.region, credentials=nil) 64 | options = { :region => region } 65 | 66 | # Don't memoize if supplying explicit credentials as it could be an assumed role for a remote account 67 | if credentials.nil? 68 | clients["ec2-#{region}"] ||= Aws::EC2::Client.new(options) 69 | else 70 | Aws::EC2::Client.new options.merge(credentials) 71 | end 72 | end 73 | 74 | def ecr(region = Config.aws.region) 75 | clients["ecr-#{region}"] ||= Aws::ECR::Client.new(:region => region) 76 | end 77 | 78 | def asg(region = Config.aws.region) 79 | clients["asg-#{region}"] ||= Aws::AutoScaling::Client.new(:region => region) 80 | end 81 | 82 | def remove_security_group(region = Config.aws.region, group_id = nil) 83 | if region.nil? 84 | puts "Dry-run; skipping delete of group_id #{group_id}" 85 | return 86 | end 87 | if group_id.nil? 88 | puts "Not removing security group" 89 | return 90 | end 91 | ec2 = ec2(region) 92 | Retryable.retryable(:sleep => lambda { |n| 4**n }, :tries => 4, :on => [Aws::EC2::Errors::ServiceError, Aws::EC2::Errors::InternalError]) do |retries, _| 93 | resp = ec2.delete_security_group(group_id: group_id) 94 | end 95 | puts "Deleted SecurityGroup #{group_id}" 96 | end 97 | 98 | def get_security_group_id(region = Config.aws.region) 99 | group_id = nil 100 | if region.nil? 
101 | group_id = 'sg-DRYRUNSG' 102 | puts "Dry-run; skipping create and returning #{group_id}" 103 | return group_id 104 | end 105 | ec2 = ec2(region) 106 | group = nil 107 | require 'open-uri' 108 | external_ip = open('http://checkip.amazonaws.com').read.strip 109 | cidr_ip = external_ip + '/32' 110 | 111 | # Create a security group with microsecond timestamp (to avoid collisions when using seconds) 112 | ts_usec = (Time.now.to_f*1000000).to_i 113 | Retryable.retryable(:sleep => lambda { |n| 4**n }, :tries => 4, :on => [Aws::EC2::Errors::ServiceError, Aws::EC2::Errors::InternalError]) do |retries, _| 114 | resp = ec2.create_security_group(group_name: "BuilderatorSecurityGroupSSHOnly-#{ts_usec}", 115 | description: "Created by Builderator at #{Time.now}") 116 | group_id = resp[:group_id] 117 | 118 | resp = ec2.describe_security_groups(group_ids: [group_id]) 119 | groups = resp[:security_groups] 120 | group = groups.first 121 | 122 | # Ensure the group_id has the right permissions 123 | resp = ec2.authorize_security_group_ingress(group_id: group_id, 124 | ip_protocol: 'tcp', 125 | from_port: 22, 126 | to_port: 22, 127 | cidr_ip: cidr_ip) 128 | end 129 | puts "Created SecurityGroup #{group_id}" 130 | group_id 131 | end 132 | 133 | private 134 | 135 | def clients 136 | @clients ||= {} 137 | end 138 | 139 | def _filter_reduce(resource, filters) 140 | filters.reduce(true) do |memo, (k, v)| 141 | ## Allow for negation with a leading `~` 142 | if v[0] == '~' 143 | memo && (!resource[:properties].include?(k.to_s) || resource[:properties][k.to_s] != v[1..-1]) 144 | else 145 | memo && resource[:properties].include?(k.to_s) && resource[:properties][k.to_s] == v 146 | end 147 | end 148 | end 149 | end 150 | end 151 | end 152 | 153 | require_relative './util/aws_exception' 154 | require_relative './util/limit_exception' 155 | -------------------------------------------------------------------------------- /lib/builderator/tasks/vagrant.rb: -------------------------------------------------------------------------------- 1 | require 'thor' 2 | 3 | require_relative '../interface/vagrant' 4 | require_relative '../patch/thor-actions' 5 | 6 | require_relative './berkshelf' 7 | 8 | module Builderator 9 | module Tasks 10 | ## 11 | # Wrap vagrant commands 12 | ## 13 | class Vagrant < Thor 14 | include Thor::Actions 15 | 16 | def self.exit_on_failure? 17 | true 18 | end 19 | 20 | desc 'configure [PROFILE]', 'Write a Vagrantfile into the project workspace' 21 | def configure(profile = :default) 22 | Config.profile.use(profile) 23 | invoke Tasks::Version, :current, [], options 24 | 25 | Interface.vagrant.write 26 | end 27 | 28 | desc 'local [PROFILE [ARGS ...]]', 'Start VirtualBox VM(s)' 29 | def local(profile = :default, *args) 30 | invoke :configure, [profile], options 31 | 32 | inside Interface.vagrant.directory do 33 | command = Interface.vagrant.command 34 | command << " up --provider=#{Config.profile.current.vagrant.local.provider} " 35 | command << args.join(' ') 36 | 37 | return run(command) if Interface.vagrant.bundled? 38 | run_without_bundler command 39 | end 40 | end 41 | 42 | desc 'ec2 [PROFILE [ARGS ...]]', 'Start EC2 instances' 43 | def ec2(profile = :default, *args) 44 | invoke :configure, [profile], options 45 | 46 | inside Interface.vagrant.directory do 47 | command = Interface.vagrant.command 48 | command << " up --provider=#{Config.profile.current.vagrant.ec2.provider} " 49 | command << args.join(' ') 50 | 51 | return run(command) if Interface.vagrant.bundled? 
52 | run_without_bundler command 53 | end 54 | end 55 | 56 | desc 'provision [PROFILE [ARGS ...]]', 'Reprovision Vagrant VM(s)' 57 | def provision(profile = :default, *args) 58 | invoke :configure, [profile], options 59 | 60 | invoke Berkshelf, :vendor, [], options 61 | invoke :rsync, [profile], options 62 | 63 | inside Interface.vagrant.directory do 64 | command = Interface.vagrant.command 65 | command << " provision #{args.join(' ')}" 66 | 67 | return run(command) if Interface.vagrant.bundled? 68 | run_without_bundler command 69 | end 70 | end 71 | 72 | desc 'status [PROFILE [ARGS ...]]', 'Reprovision Vagrant VM(s)' 73 | def status(profile = :default, *args) 74 | invoke :configure, [profile], options 75 | 76 | inside Interface.vagrant.directory do 77 | command = Interface.vagrant.command 78 | command << " status #{args.join(' ')}" 79 | 80 | return run(command) if Interface.vagrant.bundled? 81 | run_without_bundler command 82 | end 83 | end 84 | 85 | desc 'ssh [PROFILE [ARGS ...]]', 'SSH into Vagrant VM(s)' 86 | def ssh(profile = :default, *args) 87 | invoke :configure, [profile], options 88 | 89 | inside Interface.vagrant.directory do 90 | command = Interface.vagrant.command 91 | command << " ssh #{args.join(' ')}" 92 | 93 | return run(command) if Interface.vagrant.bundled? 94 | run_without_bundler command, :childprocess => true 95 | end 96 | end 97 | 98 | desc 'rsync [PROFILE [ARGS ...]]', 'Sync resources to Vagrant VM(s)' 99 | def rsync(profile = :default, *args) 100 | invoke :configure, [profile], options 101 | 102 | inside Interface.vagrant.directory do 103 | command = Interface.vagrant.command 104 | command << " rsync #{args.join(' ')}" 105 | 106 | return run(command) if Interface.vagrant.bundled? 107 | run_without_bundler command 108 | end 109 | end 110 | 111 | desc 'destroy [PROFILE [ARGS ...]]', 'Destroy Vagrant VM(s)' 112 | method_option :force, :aliases => :f, :type => :boolean, :default => true 113 | def destroy(profile = :default, *args) 114 | invoke :configure, [profile], options 115 | 116 | inside Interface.vagrant.directory do 117 | command = Interface.vagrant.command 118 | command << " destroy #{args.join(' ')}" 119 | command << ' -f' if options['force'] 120 | 121 | return run(command) if Interface.vagrant.bundled? 122 | run_without_bundler command 123 | end 124 | end 125 | 126 | desc 'clean', 'Destroy VMs and clean up local files' 127 | method_option :force, :aliases => :f, :type => :boolean, :default => true 128 | def clean(profile = :default) 129 | destroy(profile) 130 | 131 | remove_dir Interface.vagrant.directory.join('.vagrant') 132 | remove_file Interface.vagrant.source 133 | end 134 | 135 | desc 'plugins [PROJECT=default]', 'Install plugins required for PROJECT' 136 | def plugins(project = :default) 137 | if Interface.vagrant.bundled? 138 | say 'Vagrant is already bundled. Required plugins are already part of the bundle as well' 139 | return 140 | end 141 | 142 | Config.generator.project.use(project) 143 | Config.generator.project.current.vagrant.plugin.each do |pname, plugin| 144 | command = Interface.vagrant.command 145 | command << " plugin install #{ pname }" 146 | command << " --plugin-version #{ plugin.version }" if plugin.has?(:version) 147 | 148 | return run(command) if Interface.vagrant.bundled? 
149 | run_without_bundler command 150 | end 151 | end 152 | end 153 | end 154 | end 155 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Builderator [![Build Status](https://api.travis-ci.org/rapid7/builderator.svg?branch=master)](https://travis-ci.org/rapid7/builderator) 2 | 3 | Orchestration and configuration of the code development life-cycle. 4 | 5 | ## Commands 6 | 7 | Run `build help` for a complete enumeration of Builderator's commands. 8 | 9 | ### `local [PROFILE = default]` 10 | 11 | Provision a local VM using Vagrant and, by default, VirtualBox. Uses Berkshelf to fetch cookbooks, and Chef to provision the VM. 12 | 13 | ### `ec2 [PROFILE = default]` 14 | 15 | Provision an EC2 VM using Vagrant. Same workflow as `local`, using the `vagrant-aws` plugin. 16 | 17 | ### `image [PROFILE = default]` 18 | 19 | Use [Packer](https://www.packer.io) to build one or more images for the specified profile. 20 | 21 | By default, the generated images are copied to their respective `ami_regions` and tagged. Supply the `--no-copy` flag to keep the new image from being automatically copied to its configured regions. 22 | 23 | ## Configuration 24 | 25 | Configuration can be loaded from DSL files as well as JSON and command line arguments. By default, Builderator searches in your home directory (`$HOME/.builderator/Buildfile`) and the working directory (`./Buildfile`) for DSL files. Configuration sources are layered and flattened into a single DSL in the following order: 26 | 27 | * Global defaults defined in the Builderator sources 28 | * `Config.defaults` set by plugins, tasks, etc. in code 29 | * `$HOME/.builderator/Buildfile` 30 | * `./Buildfile` 31 | * `Config.overrides` set by plugins, tasks, etc. in code 32 | * CLI arguments loaded from Thor 33 | 34 | [Additional documentation](docs/configuration.md) describes the configuration DSL interface. 35 | 36 | ## Integrations 37 | 38 | Builderator integrates with other tools, including [Berkshelf](http://berkshelf.com), [Vagrant](https://www.vagrantup.com/), and [Packer](https://www.packer.io/), to orchestrate workflows by generating `Berksfile`s, `Vagrantfile`s, and JSON strings for Packer. This means that you can replace all of these files in your project with a single `Buildfile`. 39 | 40 | ### Berkshelf 41 | 42 | The Berkshelf integration generates a Berksfile. The following configuration 43 | values are supported. 44 | 45 | #### solver 46 | 47 | The generated Berksfile uses the `:gecode` dependency resolver by default. To 48 | use the `:ruby` solver, add this snippet to your Buildfile. 49 | 50 | ```ruby 51 | berkshelf do |berks| 52 | berks.solver :ruby 53 | end 54 | ``` 55 | 56 | For help debugging cookbook dependency issues, you can set the `DEBUG_RESOLVER` 57 | environment variable and use the `--debug` flag when running `build berks` 58 | subcommands. 59 | 60 | ```bash 61 | export DEBUG_RESOLVER=1 62 | build berks vendor --debug 63 | ``` 64 | 65 | ### Packer 66 | 67 | The Packer integration generates a Packer JSON configuration and passes it to STDIN of `packer build -`. 68 | 69 | ### Vagrant 70 | 71 | Builderator shells out to Vagrant to provision VMs on demand. Run 72 | 73 | ```sh 74 | build vagrant plugins 75 | ``` 76 | 77 | to install required plugins for Builderator.
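
### Example `Buildfile`

Taken together, the integrations above are all driven from a single `Buildfile`. The sketch below is illustrative only: the build name, run list items, instance type, and AMI ID are placeholders, and the attribute names follow the [configuration docs](docs/configuration.md) and [profile docs](docs/configuration/profile.md).

```ruby
build_name  'my-service'               # required
description 'Example service image'

## Release automation (see Versioning below)
autoversion.create_tags true

## Berkshelf: use the pure-Ruby dependency solver
berkshelf.solver :ruby

## Cookbook sources for the generated Berksfile
cookbook do |cookbook|
  cookbook.path '.'
  cookbook.add_source 'https://supermarket.chef.io'
end

## Shared by `build local`, `build ec2`, and `build image`
profile :default do |default|
  default.chef.run_list 'apt::default', 'my-service::default'

  default.packer.build :default do |build|
    build.instance_type 't2.medium'
    build.source_ami    'ami-00000000' # placeholder
  end
end
```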
78 | 79 | ## Versioning 80 | 81 | Builderator can automatically detect versions from SCM tags, increment the latest version of an SCM branch by a variety of steps, and create new SCM tags for new versions. 82 | 83 | If the SCM has no version tags, Builderator will try to read the version from a VERSION file in the repository. 84 | 85 | [Additional documentation](docs/versioning.md) describes CLI commands, configuration, and detailed behavior. 86 | 87 | ## Generators 88 | 89 | Builderator includes a task to generate common project trees from configuration definitions and templates. 90 | 91 | Each type of project is configurable via the project collection in the `generator` namespace: 92 | 93 | ```ruby 94 | generator.project :default do |default| 95 | default.ruby.version '2.1.5' 96 | default.builderator.version '~> 1.0' 97 | 98 | default.vagrant do |vagrant| 99 | vagrant.install false 100 | vagrant.version 'v1.8.0' 101 | 102 | vagrant.plugin 'vagrant-aws' 103 | vagrant.plugin 'vagrant-omnibus' 104 | end 105 | 106 | default.resource :berksfile do |berksfile| 107 | berksfile.path 'Berksfile', 'Berksfile.lock' 108 | berksfile.action :rm 109 | end 110 | 111 | default.resource :buildfile do |buildfile| 112 | buildfile.path 'Buildfile' 113 | buildfile.action :create 114 | buildfile.template 'template/Buildfile.erb' 115 | end 116 | 117 | # ... 118 | end 119 | ``` 120 | 121 | Valid actions for resources include `:ignore`, `:create` (update only if missing), `:sync` (create or update with prompt), and `:rm`. `:create` and `:sync` actions require a valid template source. 122 | 123 | By default, the `generator` subcommand includes a `default` project which removes Vagrant, Berkshelf, and Packer configurations. 124 | 125 | ## Utilities 126 | 127 | Builderator includes two utilities to search for and clean up EC2 resources. 128 | 129 | ### `build-clean` 130 | 131 | The `build-clean` command prunes old EC2 resources in a region. The cleaner utility has [configurable](docs/configuration.md#namespace-cleaner) limits. For complete usage, run `build-clean help`. 132 | 133 | ### `build-data` 134 | 135 | The `build-data` utility is a command line interface for Builderator's internal resource lookup API. It has a limited query interface that allows filters to be constructed from a trailing list of argument: 136 | 137 | ```sh 138 | build-data image architecture x86_64 virtualization-type hvm ... 139 | ``` 140 | 141 | Pre-defined filters can also be applied with the `-f FILTER` flag. These currently include 142 | 143 | * `ubuntu-14.04-daily` 144 | * `windows-server2012-r2` 145 | * `windows-server2012-r2-core` 146 | 147 | Run `build-data help` for a complete listing of commands and arguments. 
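
### Configuring cleaner limits

The safety limits that `build-clean` enforces (see above) are ordinary configuration and can be raised or lowered per project. The snippet below is a hedged sketch that mirrors the defaults in `lib/builderator/config/defaults.rb`; the doubled numbers are arbitrary examples, not recommendations.

```ruby
cleaner do |cleaner|
  cleaner.commit false   # dry-run by default; set to true to actually delete resources
  cleaner.force  false   # when true, the limits below are ignored
  cleaner.keep   5       # images to keep in each group

  cleaner.limits do |limits|
    limits.images         48
    limits.launch_configs 96
    limits.snapshots      48
    limits.volumes        16
  end
end
```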
148 | -------------------------------------------------------------------------------- /lib/builderator/tasks.rb: -------------------------------------------------------------------------------- 1 | require 'thor' 2 | 3 | require_relative './config' 4 | require_relative './patch/thor-actions' 5 | 6 | # require_relative './tasks/cookbook' 7 | require_relative './tasks/vendor' 8 | require_relative './tasks/version' 9 | 10 | require_relative './tasks/berkshelf' 11 | require_relative './tasks/packer' 12 | require_relative './tasks/vagrant' 13 | 14 | module Builderator 15 | module Tasks 16 | ## 17 | # Top-level command line tasks 18 | ## 19 | class CLI < Thor 20 | include Thor::Actions 21 | VERSION = ['--version', '-v'].freeze 22 | 23 | map VERSION => :print_version 24 | 25 | def initialize(*_) 26 | super 27 | 28 | # Ignore existing config when we don't need it: i.e. `help` 29 | ignore_existing_config = ['help'] + VERSION 30 | return if ignore_existing_config.any? do |i| 31 | ARGV.include?(i) || ARGV.empty? 32 | end 33 | 34 | Config.argv(options) ## Load command flags 35 | Config.load(File.join(ENV['HOME'], '.builderator/Buildfile')) 36 | Config.load(Util.relative_path('Buildfile').to_s) 37 | Config.compile 38 | end 39 | 40 | desc '--version, -v', 'Print Builderator version' 41 | def print_version 42 | say Gem.loaded_specs['builderator'].version 43 | end 44 | 45 | def self.exit_on_failure? 46 | true 47 | end 48 | 49 | ## Globally enable/disable workspace cleanup 50 | class_option 'cleanup', :type => :boolean, :default => true 51 | 52 | ## 53 | # Tasks common to local, ec2, and ami builds 54 | ## 55 | desc 'prepare', 'Common preparation tasks for Vagrant and Packer' 56 | def prepare 57 | invoke Tasks::Vendor, :all, [], options 58 | invoke Tasks::Version, :current, [], options 59 | invoke Tasks::Berkshelf, :vendor, [], options 60 | 61 | # mvn package? 62 | # invoke Tasks::Cookbook, :prepare, [] 63 | end 64 | 65 | ## 66 | # Main commands. 
67 | # 68 | # `local`, `ec2`, and `build` invoke sets of subcommands to build VMs or images 69 | ## 70 | desc 'local [PROFILE = default VAGRANT_ARGS]', 'Provision a local VM of PROFILE' 71 | def local(*args) 72 | prepare 73 | invoke Tasks::Vagrant, :local, args, options 74 | end 75 | 76 | desc 'ec2 [PROFILE = default VAGRANT_ARGS]', 'Provision an EC2 instance of PROFILE' 77 | def ec2(*args) 78 | prepare 79 | invoke Tasks::Vagrant, :ec2, args, options 80 | end 81 | 82 | desc 'image [PROFILE = default]', 'Build an AMI of PROFILE' 83 | method_option :debug, :type => :boolean 84 | method_option :remote_tag, :type => :boolean, :default => true 85 | method_option :copy, :type => :boolean, :default => true 86 | def image(profile = :default) 87 | prepare 88 | 89 | invoke Tasks::Packer, :build, [profile], options 90 | invoke Tasks::Packer, :copy, [profile], options if options['copy'] 91 | invoke Tasks::Packer, :remote_tag, [profile], options if options['remote_tag'] 92 | end 93 | 94 | desc 'container [PROFILE = docker]', 'Build a container of PROFILE' 95 | method_option :debug, :type => :boolean 96 | def container(profile = :docker) 97 | prepare 98 | 99 | invoke Tasks::Packer, :build, [profile], options 100 | end 101 | 102 | # desc 'cookbook SUBCOMMAND', 'Cookbook tasks' 103 | # subcommand 'cookbook', Tasks::Cookbook 104 | 105 | desc 'vendor SUBCOMMAND', 'Vendor loading tasks' 106 | subcommand 'vendor', Tasks::Vendor 107 | 108 | desc 'version SUBCOMMAND', 'Version management tasks' 109 | subcommand 'version', Tasks::Version 110 | 111 | ## 112 | # Helper/utility commands 113 | ## 114 | desc 'config', 'Print compiled configuration' 115 | def config(key = nil) 116 | invoke Tasks::Version, :current, [], options 117 | 118 | return puts Config.compiled.send(key).to_json unless key.nil? 119 | puts Config.compiled.to_json 120 | end 121 | 122 | desc 'clean', 'Run cleanup tasks' 123 | def clean 124 | invoke Tasks::Vagrant, :clean 125 | invoke Tasks::Berkshelf, :clean 126 | invoke Tasks::Vendor, :clean 127 | end 128 | 129 | ## 130 | # CLI Wrappers 131 | ## 132 | desc 'berks SUBCOMMAND', 'Berkshelf helpers' 133 | subcommand 'berks', Tasks::Berkshelf 134 | 135 | desc 'packer SUBCOMMAND', 'Run Packer tasks' 136 | subcommand 'packer', Tasks::Packer 137 | 138 | desc 'vagrant SUBCOMMAND', 'Run Vagrant tasks' 139 | subcommand 'vagrant', Tasks::Vagrant 140 | 141 | ## 142 | # Generator 143 | ## 144 | desc 'generate [PROJECT=default]', 'Run a generator' 145 | method_option 'build-name', :type => :string 146 | method_option :ignore, :type => :array 147 | method_option :sync, :type => :array 148 | method_option :rm, :type => :array 149 | def generate(project = :default) 150 | fail 'Please provide a valid build name with the `--build-name=VALUE` option!' 
unless Config.has?(:build_name) 151 | Config.generator.project.use(project) 152 | 153 | Config.generator.project.current.resource.each do |rname, resource| 154 | next if (options['ignore'] && options['ignore'].include?(rname.to_s)) || 155 | resource.action == :ignore 156 | 157 | if (options['sync'] && options['sync'].include?(rname.to_s)) || 158 | resource.action == :sync 159 | template resource.template, resource.path.first 160 | next 161 | end 162 | 163 | if (options['rm'] && options['rm'].include?(rname.to_s)) || 164 | resource.action == :rm 165 | resource.path.each { |rm| remove_file rm } 166 | next 167 | end 168 | 169 | ## Create 170 | template resource.template, resource.path.first, :skip => true 171 | end 172 | end 173 | end 174 | end 175 | end 176 | -------------------------------------------------------------------------------- /lib/builderator/config/defaults.rb: -------------------------------------------------------------------------------- 1 | require_relative './file' 2 | require_relative '../util' 3 | 4 | module Builderator 5 | # :nodoc 6 | module Config 7 | ## 8 | # Global predefined defaults 9 | ## 10 | GLOBAL_DEFAULTS = File.new({}, :source => 'GLOBAL_DEFAULTS') do 11 | cleanup true 12 | version '0.0.0' 13 | build_number 0 14 | 15 | autoversion do |autoversion| 16 | autoversion.create_tags false 17 | autoversion.search_tags true 18 | end 19 | 20 | local do |local| 21 | local.cookbook_path Util.workspace('cookbooks') 22 | end 23 | 24 | chef do |chef| 25 | chef.log_level :info 26 | chef.staging_directory '/var/chef' 27 | chef.version = '12.5.1' 28 | end 29 | 30 | cookbook do |cookbook| 31 | cookbook.path = '.' 32 | cookbook.add_source 'https://supermarket.chef.io' 33 | end 34 | 35 | berkshelf do |berkshelf| 36 | berkshelf.solver :gecode 37 | end 38 | 39 | aws.region 'us-east-1' 40 | 41 | profile :default do |profile| 42 | profile.log_level :info 43 | 44 | profile.vagrant do |vagrant| 45 | vagrant.ec2 do |ec2| 46 | ec2.provider :aws 47 | 48 | ec2.box 'dummy' 49 | ec2.box_url 'https://github.com/mitchellh/vagrant-aws/raw/master/dummy.box' 50 | 51 | ec2.region = 'us-east-1' 52 | ec2.instance_type 't2.micro' 53 | ec2.ssh_username 'ubuntu' 54 | ec2.ssh_host_attribute :public_ip_address 55 | end 56 | 57 | vagrant.local do |local| 58 | local.provider :virtualbox 59 | 60 | ## Atlas metadata for Ubuntu cloud-images: https://atlas.hashicorp.com/ubuntu/boxes/trusty64 61 | local.box 'ubuntu/trusty64' 62 | 63 | local.memory 1024 64 | local.cpus 2 65 | end 66 | end 67 | 68 | profile.packer do |packer| 69 | packer.build :default do |build| 70 | build.type 'amazon-ebs' 71 | build.region = 'us-east-1' 72 | build.instance_type 'c3.large' 73 | build.ami_virtualization_type 'hvm' 74 | 75 | build.ssh_username 'ubuntu' 76 | 77 | # Packer default is 300 seconds. Specify as a string to give units 78 | # such as s (seconds), ms (milliseconds), ns (nanoseconds), etc. 79 | # Ints will be interpreted as ns. Buyer beware. 80 | build.ssh_timeout '300s' 81 | 82 | # Clear the AMI and launch block device mappings for the default 83 | # c3.large instance type. 
84 | build.ami_block_device_mappings [{ 85 | 'device_name' => '/dev/sdb', 86 | 'no_device' => true, 87 | }, { 88 | 'device_name' => '/dev/sdc', 89 | 'no_device' => true, 90 | }] 91 | build.launch_block_device_mappings [{ 92 | 'device_name' => '/dev/sdb', 93 | 'no_device' => true, 94 | }, { 95 | 'device_name' => '/dev/sdc', 96 | 'no_device' => true, 97 | }] 98 | 99 | build.ami_name [Config.build_name, Config.version, Config.build_number].reject(&:nil?).join('-') 100 | build.ami_description Config.description 101 | end 102 | end 103 | end 104 | 105 | profile :docker do |profile| 106 | profile.log_level :info 107 | 108 | profile.packer do |packer| 109 | packer.build :docker do |build| 110 | build.type 'docker' 111 | end 112 | end 113 | end 114 | 115 | cleaner do |cleaner| 116 | cleaner.commit false 117 | cleaner.force false 118 | cleaner.filters {} 119 | cleaner.sort_by 'creation_date' 120 | cleaner.keep 5 121 | 122 | cleaner.limits do |limits| 123 | limits.images 24 124 | limits.launch_configs 48 125 | limits.snapshots 24 126 | limits.volumes 8 127 | end 128 | end 129 | 130 | generator.project :default do |default| 131 | default.builderator.version '~> 1.0' 132 | 133 | default.vagrant do |vagrant| 134 | vagrant.install false 135 | vagrant.version 'v1.8.0' 136 | 137 | vagrant.plugin 'vagrant-aws' 138 | vagrant.plugin 'vagrant-omnibus' 139 | end 140 | 141 | default.resource :berksfile do |berksfile| 142 | berksfile.path 'Berksfile', 'Berksfile.lock' 143 | berksfile.action :rm 144 | end 145 | 146 | default.resource :buildfile do |buildfile| 147 | buildfile.path 'Buildfile' 148 | buildfile.action :create 149 | buildfile.template 'template/Buildfile.erb' 150 | end 151 | 152 | default.resource :cookbook do |cookbook| 153 | cookbook.path 'cookbook' 154 | cookbook.action :rm 155 | end 156 | 157 | default.resource :gemfile do |gemfile| 158 | gemfile.path 'Gemfile' 159 | gemfile.action :create 160 | gemfile.template 'template/Gemfile.erb' 161 | end 162 | 163 | default.resource :gitignore do |gitignore| 164 | gitignore.path '.gitignore' 165 | gitignore.action :create 166 | gitignore.template 'template/gitignore.erb' 167 | end 168 | 169 | default.resource :packerfile do |packerfile| 170 | packerfile.path 'packer.json', 'packer' 171 | packerfile.action :rm 172 | end 173 | 174 | default.resource :rubocop do |rubocop| 175 | rubocop.path '.rubocop.yml' 176 | rubocop.action :create 177 | rubocop.template 'template/rubocop.erb' 178 | end 179 | 180 | default.resource :readme do |readme| 181 | readme.path 'README.md' 182 | readme.action :create 183 | readme.template 'template/README.md.erb' 184 | end 185 | 186 | default.resource :thorfile do |thorfile| 187 | thorfile.path 'Thorfile' 188 | thorfile.action :rm 189 | end 190 | 191 | default.resource :vagrantfile do |vagrantfile| 192 | vagrantfile.path 'Vagrantfile' 193 | vagrantfile.action :rm 194 | end 195 | end 196 | end 197 | end 198 | end 199 | -------------------------------------------------------------------------------- /docs/configuration/profile.md: -------------------------------------------------------------------------------- 1 | Collection `profile` 2 | ==================== 3 | 4 | A profile is a combination of Chef parameters, and Vagrant and Packer configurations. Profiles should provide 5 | 6 | * `tags, type: hash` EC2 tags to apply to instances and AMIs 7 | * `log_level` Chef log-level. Default `:info` 8 | 9 | ## Collection `artifact` 10 | 11 | An externally managed resource to push to VMs and image builds, e.g. 
`bundle.tar.gz` from a Maven build. 12 | 13 | * `path` The workspace-rooted path to the artifact 14 | * `destination` The absolute path on the VM or image at which the artifact should be placed 15 | 16 | ## Namespace `chef` 17 | * `run_list, type: list, singular: run_list_item, unique: true` The Chef runlist for this profile 18 | * `environment` The Chef environment to load for this 19 | * `node_attrs, type: hash` A hash of node attributes for this profile 20 | * `binary_env` A space separated, KEY=VALUE formatted string to pass data 21 | into the provisioning process as environment variables. See 22 | [the vagrant docs](https://www.vagrantup.com/docs/provisioning/chef_common.html#binary_env) 23 | for more information. 24 | 25 | ## Collection `provisioner` 26 | 27 | Packer/Vagrant provisioner definitions. Currently only supports inline shell provisioners. 28 | 29 | * `inline, type: list` A list of shell provisioners 30 | * `environment_vars, type: list` A list of environment vars (in `KEY=VALUE` format) to pass to the shell script 31 | 32 | ## Namespace `packer` 33 | 34 | Packer configurations for this profile 35 | 36 | ### Collection `build` 37 | 38 | Add a Packer build 39 | 40 | * `type` the build provider (e.g. amazon-ebs, virtualbox, docker) 41 | 42 | Options for the `docker` builder: 43 | 44 | * `image` The base image for the Docker container that will be started 45 | 46 | The Docker builder requires one, and only one, of the following options: 47 | 48 | * `commit` The container will be committed to an image rather than exported 49 | * `discard` Throw away the container when the build is complete 50 | * `export_path` The path where the final container will be exported as a tar file 51 | 52 | There are additional options specified in [`lib/builderator/config/file.rb`](../../lib/builderator/config/file.rb) and 53 | in the Packer documentation. 54 | 55 | Options for the `amazon-ebs` builder: 56 | 57 | * `instance_type` the EC2 instance type to use 58 | * `source_ami` The source AMI ID for an `amazon-ebs` 59 | * `ssh_username` Default `ubuntu` 60 | * `ami_virtualization_type` Default `hvm` 61 | * `tagging_role` the name of an IAM role that exists in each remote account that allows the AMI to be retagged 62 | 63 | Example usage: 64 | 65 |
 66 |      profile bake: Config.profile(:default) do |bake|
 67 |        bake.packer do |packer|
 68 |          packer.build :default do |build|
 69 |            build.tagging_role 'CreateTagsOnAllImages'
 70 |            build.run_tags(
 71 |              'Owner' => 'ops',
 72 |              'Product' => 'packer'
 73 |            )
 74 |            build.run_volume_tags(
 75 |              'Owner' => 'ops',
 76 |              'Product' => 'packer'
 77 |            )
 78 |          end
 79 |        end
 80 |      end
 81 |   
82 | 83 | Example IAM policy in remote account: 84 | 85 |
 86 |   {
 87 |       "Version": "2012-10-17",
 88 |       "Statement": [
 89 |           {
 90 |               "Sid": "StmtId",
 91 |               "Effect": "Allow",
 92 |               "Action": [
 93 |                   "ec2:CreateTags"
 94 |               ],
 95 |               "Resource": [
 96 |                   "*"
 97 |               ]
 98 |           }
 99 |       ]
100 |   }
101 |   
102 | 103 | 104 | The above policy needs to be assigned to a role that enables a trust relationship with the account that builds the AMI: 105 | 106 |
107 |   {
108 |       "Version": "2012-10-17",
109 |       "Statement": [
110 |         {
111 |             "Effect": "Allow",
112 |             "Principal": {
113 |               "AWS": "arn:aws:iam::[ami_builder_account]:user/[ami_builder_user]"
114 |             },
115 |             "Action": "sts:AssumeRole"
116 |         }
      ]
117 |   }
118 |   
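
During `build image`, the `remote_tag` task (`lib/builderator/tasks/packer.rb`) assumes `tagging_role` in each `ami_users` account and skips any account where the assume-role call fails. If tags are not appearing in a remote account, the trust relationship can be exercised directly with a short aws-sdk snippet. This is only a debugging sketch: the account ID is a placeholder, and the role name matches the example above.

```ruby
require 'aws-sdk'

sts = Aws::STS::Client.new(region: 'us-east-1')

## The same call the remote_tag task makes. An error here usually means the
## role is missing in the remote account, or its trust policy does not allow
## the building account to assume it.
response = sts.assume_role(
  :role_arn          => 'arn:aws:iam::123456789012:role/CreateTagsOnAllImages',
  :role_session_name => 'tag-new-ami'
)

puts "Assumed role; temporary access key: #{response.credentials.access_key_id}"
```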
119 | 120 | ### Attribute `post_processors` 121 | 122 | Add a packer post-processor to run after the provisioning steps. This is a free-form 123 | attribute as there is no validation of elements defined here. If invalid configuration 124 | is supplied, errors will only appear when Packer tries to execute them. 125 | 126 | The `post_processors` attribute supports simple, complex, and sequence definitions. 127 | 128 | Example: 129 | 130 | ```ruby 131 | packer.post_processors [ 132 | [ 133 | # Complex 134 | { 135 | :type => 'docker-tag', 136 | :repository => 'rapid7/builderator', 137 | :tag => '1.2.2' 138 | }, 139 | 140 | 'docker-push' # Simple 141 | ], 142 | 143 | # Sequence 144 | [ 145 | { 146 | :type => 'docker-tag', 147 | :repository => 'rapid7/builderator', 148 | :tag => 'latest' 149 | }, 150 | 'docker-push' 151 | ] 152 | ] 153 | ``` 154 | 155 | 156 | ## TODO: Share accounts 157 | 158 | * `ami_name` Name for new AMI 159 | * `ami_description` Description for the new AMI 160 | 161 | 162 | ## Namespace `vagrant` 163 | 164 | Vagrant VM configurations 165 | 166 | ### Namespace `local` 167 | 168 | Parameters for a local VM build 169 | 170 | * `provider` Default `virtualbox` 171 | * `box` Default `ubuntu-14.04-x86_64` 172 | * `box_url` Default `https://cloud-images.ubuntu.com/vagrant/trusty/current/trusty-server-cloudimg-amd64-vagrant-disk1.box` 173 | 174 | * `cpus` Default 2 175 | * `memory` Default 1024 (MB) 176 | 177 | ## Namespace `ec2` 178 | 179 | Parameters for the provisioning EC2 nodes with Vagrant 180 | 181 | * `provider` Default `aws` 182 | * `box` Default `dummy` 183 | * `box_url` Default `https://github.com/mitchellh/vagrant-aws/raw/master/dummy.box` 184 | * `instance_type` 185 | * `source_ami` 186 | * `ssh_username` 187 | * `virtualization_type` 188 | * `iam_instance_profile_arn` 189 | * `subnet_id` 190 | * `security_groups, type: list, singular: security_group, unique: true` 191 | * `public_ip` 192 | * `ssh_host_attribute` One of: `[:public_ip_address, :dns_name, :private_ip_address]`, Default `:private_ip_address` 193 | 194 | [docker-tag]: https://www.packer.io/docs/post-processors/docker-tag.html 195 | [docker-import]: https://www.packer.io/docs/post-processors/docker-import.html 196 | [docker-save]: https://www.packer.io/docs/post-processors/docker-save.html 197 | [docker-push]: https://www.packer.io/docs/post-processors/docker-push.html 198 | -------------------------------------------------------------------------------- /lib/builderator/interface/packer.rb: -------------------------------------------------------------------------------- 1 | require_relative '../interface' 2 | 3 | module Builderator 4 | # :nodoc: 5 | class Interface 6 | class << self 7 | def packer 8 | @packer ||= Packer.new 9 | end 10 | end 11 | 12 | ## 13 | # Generate packer.json 14 | ## 15 | class Packer < Interface 16 | command 'packer' 17 | attr_reader :packerfile 18 | attr_reader :security_group_id 19 | 20 | def initialize(*_) 21 | super 22 | 23 | docker_builders = Config.profile.current.packer.build.select do |_, builder| 24 | builder.to_h[:type] == 'docker' 25 | end 26 | 27 | @packerfile ||= { 28 | :builders => [], 29 | :provisioners => [], 30 | 'post-processors' => [] 31 | }.tap do |json| 32 | Config.profile.current.packer.build.each do |_, build| 33 | build_hash = build.to_hash.tap do |b| 34 | b[:tags] = Config.profile.current.tags unless Config.profile.current.tags.empty? 
35 | end 36 | 37 | if build_hash[:type] == 'docker' 38 | raise 'The Docker builder requires a base image' unless build_hash.key?(:image) 39 | 40 | # The Docker builder requires one and only one of 'commit', 'discard', or 'export_path' set 41 | if build_hash.keys.select { |k| [:commit, :discard, :export_path].include?(k) }.length != 1 42 | raise 'The Docker builder requires one and only one of `commit`, `discard`, or `export_path` attributes to be defined' 43 | end 44 | end 45 | 46 | # If we specify encrypted boot, packer won't allow ami_users. 47 | # See: https://github.com/MYOB-Technology/packer/blob/509cd7dcf194beb6ca6d0c39057f7490fa630d78/builder/amazon/common/ami_config.go#L59-L61 48 | 49 | # A PR (https://github.com/mitchellh/packer/pull/4023) has been 50 | # submitted to resolve this issue but we shouldn't remove this 51 | # until a new Packer release with this feature. 52 | if build_hash.key?(:encrypt_boot) 53 | build_hash.delete(:ami_users) 54 | end 55 | 56 | ## Support is missing for several regions in some versions of Packer 57 | # Moving this functionality into a task until we can confirm that Packer 58 | # has full support again. 59 | build_hash.delete(:ami_regions) 60 | 61 | # This is not directly supported by Packer 62 | build_hash.delete(:tagging_role) 63 | 64 | # Use a security group that doesn't suck if user didn't specify. 65 | # Note that @security_group_id in this class will only ever be the one created here, 66 | # and will be nil if the user specified their own 67 | if build_hash.key?(:security_group_ids) 68 | puts "Using SecurityGroups #{build_hash[:security_group_ids]}" 69 | elsif build_hash.key?(:security_group_id) 70 | puts "Using SecurityGroup #{build_hash[:security_group_id]}" 71 | else 72 | @security_group_id = Util.get_security_group_id(build_hash[:region]) 73 | build_hash[:security_group_id] = @security_group_id 74 | 75 | # Delete the security group created above when on builderator exit. 76 | # Note that for an unclean exit in which the instance was NOT terminated, 77 | # Amazon will refuse to delete the security group, as it is still attached 78 | # to an existing instance. This is unfortunate, but is equivalent to packer's 79 | # default behavior except that now you'll get an exception from aws-sdk. 80 | at_exit { Util.remove_security_group(build_hash[:region], @security_group_id) } 81 | end 82 | 83 | json[:builders] << build_hash 84 | end 85 | 86 | # post_processors must be a List of Rashes. 87 | post_processors = Config.profile.current.packer.post_processors.to_a 88 | json['post-processors'] = post_processors.first unless post_processors.empty? 89 | json.delete('post-processors') if json['post-processors'].compact.empty? 90 | 91 | ## Initialize the staging directory unless using the docker builder 92 | json[:provisioners] << { 93 | :type => 'shell', 94 | :inline => "sudo mkdir -p #{Config.chef.staging_directory}/cache && "\ 95 | "sudo chown $(whoami) -R #{Config.chef.staging_directory}" 96 | } if docker_builders.empty? 97 | 98 | # Only add artifact provisioners if they're defined 99 | Config.profile.current.artifact.each do |_, artifact| 100 | json[:provisioners] << _artifact_provisioner(artifact) 101 | end unless Config.profile.current.artifact.attributes.empty? 102 | 103 | # Only add chef provisioners if they're defined 104 | unless Config.profile.current.chef.attributes.empty? 105 | # There are certain options (staging directory, run as sudo) that don't apply 106 | # to the docker builder. 107 | json[:provisioners] << if docker_builders.empty? 
108 | _chef_provisioner 109 | else 110 | _chef_provisioner_docker 111 | end 112 | end 113 | 114 | # After adding the default provisioners, we add any additional ones to the provisioners array 115 | Config.profile.current.provisioner.each do |name, provisioner| 116 | json[:provisioners] << provisioner.attributes.tap { |p| p[:type] = name.to_s } 117 | end 118 | 119 | json.delete(:provisioners) if json[:provisioners].empty? 120 | end 121 | end 122 | 123 | def render 124 | JSON.pretty_generate(packerfile) 125 | end 126 | 127 | private 128 | 129 | ## Upload artifacts to the build container 130 | def _artifact_provisioner(artifact) 131 | { 132 | :type => 'file', 133 | :source => artifact.path, 134 | :destination => artifact.destination 135 | } 136 | end 137 | 138 | def _chef_provisioner 139 | _chef_provisioner_base.merge( 140 | :staging_directory => Config.chef.staging_directory, 141 | :install_command => _chef_install_command 142 | ) 143 | end 144 | 145 | def _chef_provisioner_docker 146 | _chef_provisioner_base.merge( 147 | :prevent_sudo => true, 148 | :install_command => _chef_install_command(false) 149 | ) 150 | end 151 | 152 | def _chef_provisioner_base 153 | { 154 | :type => 'chef-solo', 155 | :run_list => Config.profile.current.chef.run_list, 156 | :cookbook_paths => Config.local.cookbook_path, 157 | :data_bags_path => Config.local.data_bag_path, 158 | :environments_path => Config.local.environment_path, 159 | :chef_environment => Config.profile.current.chef.environment, 160 | :json => Config.profile.current.chef.node_attrs, 161 | :staging_directory => Config.chef.staging_directory, 162 | :execute_command => _chef_execute_command(true), 163 | :install_command => _chef_install_command(true) 164 | } 165 | end 166 | 167 | def _chef_execute_command(sudo = true) 168 | template = sudo ? 'sudo ' : '' 169 | "#{template}chef-solo --no-color -c #{Config.chef.staging_directory}/solo.rb -j #{Config.chef.staging_directory}/node.json" 170 | end 171 | 172 | def _chef_install_command(sudo = true) 173 | template = sudo ? 'sudo ' : '' 174 | "curl -L https://www.chef.io/chef/install.sh | #{template}bash -s -- -v #{Config.chef.version}" 175 | end 176 | end 177 | end 178 | end 179 | -------------------------------------------------------------------------------- /lib/builderator/tasks/packer.rb: -------------------------------------------------------------------------------- 1 | require 'aws-sdk' 2 | require 'thor' 3 | require 'retryable' 4 | 5 | require_relative '../control/data' 6 | require_relative '../interface/packer' 7 | require_relative '../patch/thor-actions' 8 | 9 | module Builderator 10 | module Tasks 11 | ## 12 | # Wrap Packer commands 13 | ## 14 | class Packer < Thor 15 | include Thor::Actions 16 | 17 | def self.exit_on_failure? 18 | true 19 | end 20 | 21 | class_option :debug, :type => :boolean 22 | 23 | desc 'configure [PROFILE=default]', 'Generate a packer configuration' 24 | def configure(profile = :default) 25 | Config.profile.use(profile) 26 | 27 | invoke Tasks::Version, :current, [], options 28 | puts Interface.packer.render if options['debug'] 29 | end 30 | 31 | desc 'build [PROFILE=default *ARGS]', 'Run a build with the installed version of packer' 32 | def build(profile = :default, *args) 33 | invoke :configure, [profile], options 34 | run_with_input "#{Interface.packer.command} build #{options['debug'] ? 
'-debug -on-error=abort' : ''} - #{args.join('')}", Interface.packer.render 35 | end 36 | 37 | desc 'copy PROFILE', 'Copy AMIs generated by packer to other regions' 38 | def copy(profile) 39 | invoke :configure, [profile], options 40 | 41 | images.each do |image_name, (image, build)| 42 | parameters = { 43 | :source_region => Config.aws.region, 44 | :source_image_id => image.image_id, 45 | :name => image_name, 46 | :description => image.description 47 | } 48 | 49 | build.ami_regions.each do |region| 50 | say_status :copy, "image #{image_name} (#{image.image_id}) from #{Config.aws.region} to #{region}" 51 | copy_image(region, parameters) 52 | end 53 | end 54 | 55 | invoke :wait, [profile], options 56 | invoke :tag, [profile], options 57 | invoke :share, [profile], options 58 | end 59 | 60 | desc 'tag PROFILE', 'Tag AMIs in other regions' 61 | def tag(profile) 62 | invoke :configure, [profile], options 63 | 64 | images.each do |image_name, (image, build)| 65 | ## Add some additional tags about the regional source 66 | image.tags << { 67 | :key => 'source_region', 68 | :value => Config.aws.region 69 | } 70 | image.tags << { 71 | :key => 'source_ami', 72 | :value => image.image_id 73 | } 74 | 75 | build.ami_regions.each do |region| 76 | regional_image = find_image(region, image_name) 77 | 78 | say_status :tag, "AMI #{image_name} (#{regional_image.image_id}) in #{region}" 79 | Util.ec2(region).create_tags(:resources => [regional_image.image_id], :tags => image.tags) 80 | end 81 | end 82 | end 83 | 84 | desc 'wait PROFILE', 'Wait for copied AMIs to become available in another region' 85 | def wait(profile) 86 | invoke :configure, [profile], options 87 | 88 | waiting = true 89 | 90 | images.each do |image_name, (image, build)| 91 | say_status :wait, "for #{image.image_id} (#{image_name}) to be available in #{build.ami_regions.join(', ')}", :yellow 92 | end 93 | 94 | while waiting 95 | waiting = false 96 | 97 | images.each do |image_name, (image, build)| 98 | build.ami_regions.each do |region| 99 | regional_image = find_image(region, image_name) 100 | 101 | ## It takes a few seconds for the new AMI to show up in the `describe_images` response-set 102 | state = regional_image.nil? ? 'unknown' : regional_image.state 103 | image_id = regional_image.nil? ? 'unknown' : regional_image.image_id 104 | 105 | waiting = (state != 'available') || waiting 106 | status_color = case state 107 | when 'pending', 'unknown' then :yellow 108 | when 'available' then :green 109 | else :red 110 | end 111 | 112 | say_status :image, "#{image_id} (#{image.name}) is #{state} in #{region}", status_color 113 | end 114 | end 115 | 116 | ## If waiting == false, loop immediately to break 117 | sleep(20) if waiting 118 | end 119 | 120 | say_status :complete, 'All copied images are available' 121 | end 122 | 123 | desc 'remote_tag PROFILE', 'Apply existing tags to the AMI in remote AWS accounts' 124 | def remote_tag(profile) 125 | invoke :configure, [profile], options 126 | 127 | allowed_cred_keys = %w(access_key_id secret_access_key session_token) 128 | 129 | images.each do |image_name, (image, build)| 130 | ami_regions = build.ami_regions 131 | ami_regions << Config.aws.region 132 | ami_regions.uniq! 133 | ami_regions.each do |region| 134 | 135 | sts_client = Aws::STS::Client.new(region: region) 136 | 137 | if build.tagging_role.nil? 
138 | say_status :complete, 'No remote tagging to be performed as no IAM role is defined' 139 | return 140 | end 141 | 142 | regional_image = find_image(region, image_name) 143 | 144 | build.ami_users.each do |account| 145 | role_arn = "arn:aws:iam::#{account}:role/#{build.tagging_role}" 146 | begin 147 | response = sts_client.assume_role( :role_arn => role_arn, :role_session_name => "tag-new-ami") 148 | raise "Could not assume role [#{role_arn}]. Perhaps it does not exist?" unless response.successful? 149 | rescue => e 150 | say_status :skip, "Got error when trying to assume role: #{e.message} - continuing." 151 | next 152 | end 153 | 154 | creds_hash = response.credentials.to_h.keep_if { |k,v| allowed_cred_keys.include?(k.to_s) } 155 | 156 | say_status :remote_tag, "Tag AMI #{image_name} (#{regional_image.image_id}) in #{region} (#{account})" 157 | Util.ec2(region, creds_hash) 158 | .create_tags(:dry_run => false, :resources => [regional_image.image_id], :tags => image.tags) 159 | end 160 | end 161 | end 162 | say_status :complete, 'Remote tagging complete' 163 | end 164 | 165 | desc 'share PROFILE', 'Share copied AMIs in other accounts' 166 | def share(profile) 167 | invoke :configure, [profile], options 168 | 169 | shared = false 170 | 171 | images.each do |image_name, (image, build)| 172 | build.ami_regions.each do |region| 173 | build.ami_users.each do |user| 174 | shared = true 175 | 176 | regional_image = find_image(region, image_name) 177 | 178 | say_status :share, "image #{image_name} (#{regional_image.image_id}) with #{user}" 179 | 180 | share_image_parameters = { 181 | :image_id => regional_image.image_id, 182 | :launch_permission => { 183 | :add => [ 184 | { 185 | :user_id => user 186 | } 187 | ] 188 | } 189 | } 190 | 191 | Util.ec2(region).modify_image_attribute(share_image_parameters) 192 | end 193 | end 194 | end 195 | say_status :complete, 'All images are shared' if shared 196 | end 197 | 198 | private 199 | 200 | ## Find details for generated images in current region 201 | def images 202 | Retryable.retryable(:sleep => lambda { |n| 4**n }, :tries => 4, :on => [NoMethodError]) do |retries, _| 203 | @images ||= Config.profile.current.packer.build.each_with_object({}) do |(_, build), memo| 204 | memo[build.ami_name] = [Control::Data.lookup(:image, :name => build.ami_name).first, build] 205 | end 206 | @images.length # Will throw NoMethodError if no images found; triggers retry 207 | end 208 | @images 209 | end 210 | 211 | def copy_image(region, params) 212 | Retryable.retryable(:sleep => lambda { |n| 4**n }, :tries => 4, :on => [Aws::EC2::Errors::ServiceError]) do |retries, _| 213 | say_status :error, 'Error copying image', :red if retries > 0 214 | Util.ec2(region).copy_image(params) 215 | end 216 | end 217 | 218 | def find_image(region, image_name) 219 | filters = [{ 220 | :name => 'name', 221 | :values => [image_name] 222 | }] 223 | 224 | image = nil 225 | Retryable.retryable(:sleep => lambda { |n| 4**n }, :tries => 4, :on => [Aws::EC2::Errors::ServiceError]) do |retries, _| 226 | say_status :error, 'Error finding image', :red if retries > 0 227 | image = Util.ec2(region).describe_images(:filters => filters).images.first 228 | end 229 | image 230 | end 231 | end 232 | end 233 | end 234 | -------------------------------------------------------------------------------- /docs/configuration.md: -------------------------------------------------------------------------------- 1 | Configuration DSL 2 | ================= 3 | 4 | Builderator's configuration language is a Ruby DSL, 
composed of `attributes`, which are grouped into `namespaces`. A `namespace` may be singular, or it may be part of a `collection`, in which case more than one named entry may be defined with the same `namespace`. 5 | 6 | Namespaces and collections can accessed with blocks, or with a fluent interface: 7 | 8 | ```ruby 9 | aws do |a| 10 | a.region = 'us-west-1' 11 | end 12 | 13 | ## Is the same as 14 | aws.region = 'us-west-1' 15 | ``` 16 | 17 | Collections are sets of named items. Like namespaces, they can be accessed with blocks, or a fluent interface: 18 | 19 | ```ruby 20 | profile :default do |default_profile| 21 | default_profile.chef.run_list 'apt:default', 'redis:server', 'app::server' 22 | end 23 | 24 | profile(:default).chef.environment 'development' 25 | profile(:prod).chef.environment 'production' 26 | ``` 27 | 28 | In the example above, the same collection item is accessed twice. A second item is also defined in the same collection. The final result looks like: 29 | 30 | ```json 31 | { 32 | "profile": { 33 | "default": { 34 | "chef": { 35 | "run_list": ["apt:default", "redis:server", "app::server"], 36 | "environment": "development" 37 | } 38 | }, 39 | "prod": { 40 | "chef": { 41 | "environment": "production" 42 | } 43 | } 44 | } 45 | } 46 | ``` 47 | 48 | Collections and namespaces may be nested indefinitely. 49 | 50 | ## Extending Collection Items 51 | 52 | A collection item can extend another item using a hash-notation: 53 | 54 | ```ruby 55 | profile :prod => Config.profile(:default) do |prod| 56 | prod.chef.environment = 'production' 57 | end 58 | ``` 59 | 60 | Following the example, above, the `prod` profile will now be pre-populated with all of the values in the `default` profile, which can be overridden: 61 | 62 | ```json 63 | { 64 | "profile": { 65 | "default": { 66 | "chef": { 67 | "run_list": ["apt:default", "redis:server", "app::server"], 68 | "environment": "development" 69 | } 70 | }, 71 | "prod": { 72 | "chef": { 73 | "run_list": ["apt:default", "redis:server", "app::server"], 74 | "environment": "production" 75 | } 76 | } 77 | } 78 | } 79 | ``` 80 | 81 | ## List-type Attributes 82 | 83 | Some configuration attributes are actually ordered sets of values. These are referred to as `list-type` attributes, and have some additional options: 84 | 85 | ```ruby 86 | chef.run_list 'apt:default', :mode => :override 87 | ``` 88 | 89 | The `:mode` parameter tells the configuration manager how how to compile a list-type attribute that is defined in two or more layers, or is modified in an extended collection item. Currently, two modes are implemented: 90 | 91 | * `:override` - Instructs the compiler to discard any elements that have been loaded from lower layers. This does not have any effect upon the behavior of the same attribute in higher layers, meaning that the current layer's override may be appended to or overridden by future layers, according to their `mode` parameter. 92 | * `:union` - Default behavior. Instructs the compiler to perform a set-union between the current layer's elements and the current set of elements compiled from lower layers. 93 | 94 | List-type attributes may also have an `appender method`, which allows elements to be appended to the current set _in that layer_. 
**List-type attributes do not have an `=` setter.** 95 | 96 | Because `chef.run_list` is a list-type attribute, we can tell Builderator to override the `default` profile's `run_list`: 97 | 98 | ```ruby 99 | profile :prod => Config.profile(:default) do |prod| 100 | prod.chef.environment = 'production' 101 | prod.chef.run_list 'apt:default', 'redis:server', 'app::server', 'app::tls', :mode => :override 102 | end 103 | ``` 104 | 105 | We could also append to `default`'s `run_list` without modifying `default`: 106 | 107 | ```ruby 108 | profile :prod => Config.profile(:default) do |prod| 109 | prod.chef.environment = 'production' 110 | prod.chef.run_list_item 'app::tls' 111 | end 112 | ``` 113 | 114 | Both of the above will result in the same compiled configuration: 115 | 116 | ```json 117 | { 118 | "profile": { 119 | "default": { 120 | "chef": { 121 | "run_list": ["apt:default", "redis:server", "app::server"], 122 | "environment": "development" 123 | } 124 | }, 125 | "prod": { 126 | "chef": { 127 | "run_list": ["apt:default", "redis:server", "app::server", "app::tls"], 128 | "environment": "production" 129 | } 130 | } 131 | } 132 | } 133 | ``` 134 | 135 | ## Helper Methods 136 | 137 | * `lookup(source, query)` - Query an external data-source for a value inline. 138 | * Source `:image`: Return an array of EC2 instances, sorted by `creation_date` (See http://docs.aws.amazon.com/sdkforruby/api/Aws/EC2/Client.html#describe_images-instance_method) 139 | 140 | * `vendored(name, path)` - Return the absolute path to `path` in the named vendor resource. _ Hint: Use this helper to reference Builderator policy files and Chef data_bag and environment sources in an external repository._ 141 | 142 | * `relative(path)` - Return the absolute path to `path` relative to the calling Buildfile _Hint: Use this helper to reference templates included with a vendored policy._ 143 | 144 | ## Configuration Parameters 145 | 146 | * [Namespace `cookbook`](configuration/cookbook.md) 147 | * [Collection `profile`](configuration/profile.md) 148 | 149 | * `build_name, required: true` The name of the build 150 | * `build_number` Optional reference to the CI build number for this release 151 | * `build_url` Optional link the CI page for this release 152 | * `description` A short human-readable description of the build 153 | * `version` The version of this release of the build. Auto-populated by `autoversion` by default 154 | * `cleanup` Enable post-build cleanup tasks. Default `true` 155 | 156 | ### Namespace `autoversion` 157 | 158 | * `create_tags` During a release, automatically create and push new SCM tags 159 | * `search_tags` Use SCM tags to determine the current version of the build 160 | 161 | ### Namespace `chef` 162 | 163 | Global configurations for chef provisioners in Vagrant and Packer 164 | 165 | * `log_level` Chef client/solo log level 166 | * `staging_directory` the path in VMs and images that Chef artifacts should be mounted/copied to. Defaults to `/var/chef` 167 | * `version` The version of chef to install with Omnibus 168 | 169 | ### Namespace `local` 170 | 171 | Local paths used for build tasks 172 | 173 | * `cookbook_path` Path at which to vendor cookbooks. Default `.builderator/cookbooks` 174 | * `data_bag_path` and `environment_path` Paths that Chef providers should load data-bag and environment documents from. 
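
As a small illustration of the helpers above combined with the `chef` and `local` namespaces, the fragment below resolves Chef sources relative to the Buildfile itself. It is a sketch: the `data_bags` and `environments` directory names are hypothetical and depend on your project layout.

```ruby
## Chef provisioner behaviour shared by the Vagrant and Packer integrations
chef.version   '12.5.1'
chef.log_level :warn

## Resolve Chef data_bag and environment sources next to this Buildfile
local.data_bag_path    relative('data_bags')
local.environment_path relative('environments')
```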
175 |
176 | ### Collection `policy`
177 |
178 | Load additional attributes into the parent file from a relative path
179 |
180 | * `path` Load a DSL file from a relative path
181 | * `json` Load a JSON file from a relative path
182 |
183 | ### Namespace `aws`
184 |
185 | AWS API configurations. _Hint: Configure these in `$HOME/.builderator/Buildfile`, or use a built-in credential source, e.g. `~/.aws/config`!_
186 |
187 | * `region` The default AWS region to use
188 | * `access_key` and `secret_key` A valid IAM key-pair
189 |
190 | ### Collection `vendor`
191 |
192 | Fetch remote artifacts for builds
193 |
194 | * Sources:
195 |   * `path` Link to a local file/directory
196 |   * `git` Fetch a git repository
197 |   * `github` Fetch a git repository from a GitHub URI (e.g. `OWNER/REPO`) using the SSH protocol. You must have a valid SSH key configuration for public GitHub.
198 | * Git-specific parameters:
199 |   * `remote` - The name of the remote repository at `git` or `github`. Defaults to `origin`
200 |   * `branch` - The SCM branch to check out. Defaults to `master`
201 |   * `tag` or `ref` - A SHA-ish or SCM tag to check out. Overrides `branch`.
202 |   * `rel` - Check out a sub-directory of a git repository
203 |
204 | ### Namespace `cleaner`
205 |
206 | Configuration parameters for `build-clean` tasks
207 |
208 | #### Namespace `limits`
209 |
210 | Maximum number of resources to remove without manual override
211 |
212 | * `images`
213 | * `launch_configs`
214 | * `snapshots`
215 | * `volumes`
216 |
217 | ### Namespace `generator`
218 |
219 | Configurations for the `generator` task
220 |
221 | #### Collection `project`
222 |
223 | * `builderator.version` The version of Builderator to install with Bundler
224 | * `ruby.version` The version of Ruby to require for Bundler and `rbenv`/`rvm`
225 |
226 | ##### Namespace `vagrant`
227 |
228 | * `install` Boolean. Include the Vagrant gem from GitHub (`mitchellh/vagrant`)
229 | * `version` The version of Vagrant to use from GitHub, if `install` is true
230 |
231 | ###### Collection `plugin`
232 |
233 | Vagrant plugins to install, either with the `build vagrant plugin` command (for a system-wide installation of Vagrant) or in the generated Gemfile if `install` is true
234 |
235 | ##### Collection `resource`
236 |
237 | Add a managed file to the project definition
238 |
239 | * `action` One of:
240 |   * `:create` Add a file from a template if it's missing
241 |   * `:sync` Create or update a file from a template, stopping to ask for instructions if the file exists and the templated output does not match
242 |   * `:ignore` Do nothing
243 |   * `:rm` Delete a file if it exists
244 | * `path` One or more paths in the working directory that this resource manages. Action `:rm` will delete multiple files, while `:create` and `:sync` will only use the first element of the list as their destination.
245 | * `template` The path to an ERB template. Must be an absolute path: use the [helpers](#helper-methods) in the Buildfile namespace to extend paths inline.
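
As a rough illustration of how the `vendor` and `policy` collections and the `vendored` helper described above can be combined, consider the fragment below. The GitHub repository, collection keys, and file names are hypothetical and only serve to show the shape of the DSL.

```ruby
## Fetch a shared repository of policies and Chef data (hypothetical OWNER/REPO)
vendor :shared_policies do |v|
  v.github 'example-org/builderator-policies' # hypothetical repository
  v.branch 'master'
end

## Overlay a policy file from the vendored repository
policy(:organization).path vendored(:shared_policies, 'Buildfile-organization')

## Overlay a JSON document that lives next to this Buildfile
policy(:overrides).json './overrides.json'
```

Any unique key may be used for a collection entry; `:shared_policies`, `:organization`, and `:overrides` are arbitrary names.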
246 | -------------------------------------------------------------------------------- /lib/builderator/config/attributes.rb: -------------------------------------------------------------------------------- 1 | require 'forwardable' 2 | require 'json' 3 | 4 | require_relative './rash' 5 | require_relative './list' 6 | 7 | module Builderator 8 | module Config 9 | ## 10 | # Shared Attribute Mixin 11 | ## 12 | class Attributes 13 | ## 14 | # DSL Definition 15 | ## 16 | class << self 17 | def attribute(attribute_name, default = nil, **options) 18 | ## 19 | # Helpers for Array-type attributes 20 | ## 21 | if options[:type] == :list 22 | define_method(attribute_name) do |*arg, **run_options| 23 | ## Instantiate List if it doesn't exist yet. `||=` will always return a new Rash. 24 | @attributes[attribute_name] = Config::List.new(run_options) unless @attributes.has?(attribute_name, Config::List) 25 | 26 | unless arg.empty? 27 | @attributes[attribute_name].set(*arg.flatten) 28 | @attributes[attribute_name].set(*arg) if options[:flatten] == false 29 | end 30 | @attributes[attribute_name] 31 | end 32 | 33 | define_method(options[:singular]) do |*arg, **run_options| 34 | send(attribute_name, run_options).push(*arg.flatten) 35 | end if options.include?(:singular) 36 | 37 | return 38 | end 39 | 40 | ## 41 | # Helpers for Hash-type attributes 42 | ## 43 | if options[:type] == :hash 44 | define_method(attribute_name) do |arg = nil| 45 | ## Instantiate List if it doesn't exist yet. `||=` will always return a new Rash. 46 | @attributes[attribute_name] = Config::Rash.new unless @attributes.has?(attribute_name, Config::Rash) 47 | 48 | dirty(@attributes[attribute_name].merge!(Config::Rash.coerce(arg)).any?) unless arg.nil? 49 | @attributes[attribute_name] 50 | end 51 | 52 | return 53 | end 54 | 55 | ## Getter/Setter 56 | define_method(attribute_name) do |*arg| 57 | set_or_return(attribute_name, arg.first, default, options) 58 | end 59 | 60 | ## Setter 61 | define_method("#{attribute_name}=") do |arg| 62 | set_if_valid(attribute_name, arg, options) 63 | end 64 | end 65 | 66 | ## 67 | # A Namespace is a singleton sub-node of the attribute-set 68 | # 69 | # e.g. `namespace :chef ...` maps to `attributes[:chef]` and adds a 70 | # method `chef(&block)` to the DSL which is used as follows: 71 | # 72 | # ``` 73 | # chef do 74 | # run_list 'foo', 'bar' 75 | # ... 76 | # end 77 | # ``` 78 | # 79 | # Multiple calls to the DSL method are safe and will 80 | # update the same sub-node. 81 | ## 82 | def namespace(namespace_name, &definition) 83 | namespace_class = Namespace.create(namespace_name, &definition) 84 | 85 | define_method(namespace_name) do |&block| 86 | nodes[namespace_name] ||= namespace_class.new( 87 | @attributes[namespace_name], 88 | :name => namespace_name, 89 | :parent => self, &block) 90 | end 91 | end 92 | 93 | ## 94 | # A Collection is a named-set of items in a sub-node of the attribute-set. 95 | # 96 | # Like Namespaces, Collections map to a top-level key, but they also have 97 | # multiple second-order keys: 98 | # 99 | # e.g. `collection :vagrant ...` adds a DSL method 100 | # `vagrant(name = :default, &block)` which maps to 101 | # `attributes[:vagrant][]` 102 | # 103 | # Multiple entities can be added to the collection by calling the DSL method 104 | # with unique `name` arguments. 
Multiple calls to the DSL method with the 105 | # same name argument will update the existing entity in place 106 | # 107 | # An entry can be defined as an extension of another node by passing a hash 108 | # as the instance name: `name => Config.node(:name)`. This will use the values 109 | # defined in `Config.node(:name)` as defaults for the new entry 110 | ## 111 | def collection(collection_name, &definition) 112 | collection_class = Collection.create(collection_name, &definition) 113 | 114 | define_method(collection_name) do |instance_name = nil, &block| 115 | extension_base = nil 116 | 117 | ## Allow extension to be defined as a key-value 118 | if instance_name.is_a?(Hash) 119 | extension_base = instance_name.first.last 120 | instance_name = instance_name.first.first 121 | end 122 | 123 | nodes[collection_name] ||= collection_class.new( 124 | @attributes[collection_name], 125 | :parent => self) 126 | 127 | return nodes[collection_name] if instance_name.nil? 128 | nodes[collection_name].fetch(instance_name, :extends => extension_base, &block) 129 | end 130 | end 131 | end 132 | 133 | extend Forwardable 134 | include Enumerable 135 | 136 | ## Delegate enumerables to underlying storage structure 137 | def_delegators :@attributes, :[], :fetch, 138 | :keys, :values, :has?, :each, 139 | :to_hash 140 | 141 | def seal 142 | attributes.seal 143 | self 144 | end 145 | 146 | def unseal 147 | attributes.unseal 148 | self 149 | end 150 | 151 | ## Get the root Attributes object 152 | def root 153 | return self if root? 154 | 155 | parent.root 156 | end 157 | 158 | def root? 159 | parent == self 160 | end 161 | 162 | ## All dirty state should aggregate at the root node 163 | def dirty(update = false) 164 | return @dirty ||= update if root? 165 | root.dirty(update) 166 | end 167 | 168 | def dirty!(set) 169 | @dirty = set 170 | end 171 | 172 | def ==(other) 173 | attributes == other.attributes 174 | end 175 | 176 | attr_reader :attributes 177 | attr_reader :nodes 178 | attr_reader :parent 179 | attr_reader :extends 180 | 181 | def initialize(attributes = {}, options = {}, &block) 182 | @attributes = Rash.coerce(attributes) 183 | @nodes = {} 184 | @block = block 185 | 186 | ## Track change status for consumers 187 | @parent = options.fetch(:parent, self) 188 | @extends = options[:extends] 189 | @dirty = false 190 | end 191 | 192 | ## Clear dirty state flag 193 | def clean 194 | @dirty = false 195 | end 196 | 197 | def reset! 198 | @attributes = Config::Rash.new 199 | @nodes = {} 200 | @dirty = false 201 | end 202 | 203 | def compile(evaluate = true) 204 | ## Underlay base values if present 205 | if extends.is_a?(Attributes) 206 | previous_state = attributes 207 | dirty_state = dirty 208 | 209 | attributes.merge!(extends.attributes) 210 | 211 | @block.call(self) if @block && evaluate 212 | nodes.each { |_, node| node.compile } 213 | 214 | root.dirty!(dirty_state || previous_state.diff(attributes).any?) 215 | 216 | return self 217 | end 218 | 219 | ## Compile this node and its children 220 | @block.call(self) if @block && evaluate 221 | nodes.each { |_, node| node.compile } 222 | 223 | self 224 | end 225 | 226 | def merge(other) 227 | dirty(attributes.merge!(other.attributes).any?) 
228 | self 229 | end 230 | 231 | def to_json(*_) 232 | JSON.pretty_generate(to_hash) 233 | end 234 | 235 | protected 236 | 237 | def set_if_valid(key, arg, options = {}) 238 | ## TODO: define validation interface 239 | 240 | ## Mutation helpers 241 | 242 | # Input is a path relative to the working directory 243 | arg = Util.relative_path(arg).to_s if options[:relative] 244 | 245 | # Input is a path relative to the workspace 246 | arg = Util.workspace(arg).to_s if options[:workspace] 247 | 248 | ## Unchanged 249 | return if @attributes[key] == arg 250 | 251 | dirty(true) ## A mutation has occured 252 | @attributes[key] = arg 253 | end 254 | 255 | def set_or_return(key, arg = nil, default = nil, **options) 256 | if arg.nil? 257 | return @attributes[key] if @attributes.has?(key) 258 | 259 | ## Default 260 | return if default.is_a?(NilClass) ## No default 261 | 262 | ## Allow a default to be a static value, or instantiated 263 | ## at call-time from a class (e.g. Array or Hash) 264 | default_value = default.is_a?(Class) ? default.new : default 265 | return default_value if @attributes.sealed 266 | 267 | return set_if_valid(key, default_value, options) 268 | end 269 | 270 | ## Set value 271 | set_if_valid(key, arg, options) 272 | end 273 | 274 | ## 275 | # Define a namespace for attributes 276 | ## 277 | class Namespace < Attributes 278 | class << self 279 | attr_accessor :name 280 | 281 | ## 282 | # Construct a new child-class to define the interface. The constructor 283 | # accepts an attributes argument, which should be a sub-node of the root 284 | # attribute-set. 285 | ## 286 | def create(namespace_name, &definition) 287 | space = Class.new(self) 288 | space.name = namespace_name 289 | 290 | ## Define DSL interface 291 | space.instance_eval(&definition) if definition 292 | 293 | space 294 | end 295 | end 296 | 297 | attr_reader :name 298 | attr_reader :collection 299 | 300 | def initialize(attributes, options = {}, &block) 301 | super(attributes, options, &block) 302 | 303 | @name = options.fetch(:name, self.class.name) 304 | @collection = options[:collection] 305 | end 306 | end 307 | 308 | ## 309 | # Enumerable wrapper for collections 310 | ## 311 | class Collection < Attributes 312 | class << self 313 | attr_accessor :name 314 | attr_accessor :namespace_class 315 | 316 | def create(collection_name, &definition) 317 | collection = Class.new(self) 318 | collection.name = collection_name 319 | collection.namespace_class = Namespace.create(collection_name, &definition) 320 | 321 | collection 322 | end 323 | end 324 | 325 | ## Allow a single instance to be selected 326 | attr_reader :current 327 | def use(instance_name) 328 | @current = fetch(instance_name) 329 | end 330 | 331 | ## Enumerable methods return namespace instances 332 | def each(&block) 333 | attributes.each_key do |instance_name| 334 | block.call(instance_name, fetch(instance_name)) 335 | end 336 | end 337 | 338 | def name 339 | self.class.name 340 | end 341 | 342 | ## Get namespace instances 343 | def fetch(instance_name, **options, &block) 344 | nodes[instance_name] ||= self.class.namespace_class.new( 345 | attributes[instance_name], 346 | :collection => self, 347 | :name => instance_name, 348 | :parent => self, 349 | :extends => options[:extends], &block) 350 | end 351 | alias_method :[], :fetch 352 | end 353 | end 354 | end 355 | end 356 | -------------------------------------------------------------------------------- /lib/builderator/config/file.rb: 
-------------------------------------------------------------------------------- 1 | require 'time' 2 | 3 | require_relative './attributes' 4 | require_relative '../control/data' 5 | require_relative '../util' 6 | 7 | # rubocop:disable Metrics/ClassLength 8 | 9 | module Builderator 10 | module Config 11 | ## 12 | # DSL Loader for a configuration file 13 | ## 14 | class File < Attributes 15 | class << self 16 | ## DSL Loaders 17 | def from_file(source, **options) 18 | new({}, options.merge(:type => :file, :source => source)) 19 | end 20 | 21 | def from_json(source, **options) 22 | new({}, options.merge(:type => :json, :source => source)) 23 | end 24 | 25 | def lookup_cache 26 | @lookup_cache ||= {} 27 | end 28 | end 29 | 30 | attr_reader :date ## Provide an authoritative, UTC-based date for any consumers 31 | attr_reader :source ## Where the instance was defined 32 | attr_reader :type ## How compile should populate attributes 33 | attr_reader :policies 34 | 35 | def initialize(attributes = {}, options = {}, &block) 36 | @policies = {} 37 | 38 | @date = Time.now.utc 39 | @type = options.fetch(:type, :code) 40 | @source = options.fetch(:source, nil) 41 | 42 | super(attributes, options, &block) 43 | end 44 | 45 | def compile 46 | clean ## Clear dirty flag before re-parsing file or block 47 | 48 | case @type 49 | when :file 50 | instance_eval(IO.read(source), source, 0) 51 | super(false) 52 | 53 | when :json 54 | update = Rash.coerce(JSON.parse(IO.read(source))) 55 | 56 | unless @attributes == update 57 | dirty(true) 58 | @attributes = update 59 | end 60 | else 61 | instance_eval(&@block) if @block 62 | super(false) 63 | 64 | end 65 | 66 | ## Overlay policies 67 | policy.each do |name, policy| 68 | if policy.has?(:path) 69 | next unless ::File.exist?(policy.path) 70 | policies[name] ||= self.class.from_file(policy.path) 71 | 72 | elsif policy.has?(:json) 73 | next unless ::File.exist?(policy.json) 74 | policies[name] ||= self.class.from_json(policy.json) 75 | end 76 | 77 | policies[name].compile 78 | dirty(policies[name].dirty) 79 | end 80 | 81 | self 82 | end 83 | 84 | ## Use the Data controller to fetch IDs from the EC2 API at compile time 85 | def lookup(source, query) 86 | self.class.lookup_cache[cache_key(query)] ||= Control::Data.lookup(source, query) 87 | end 88 | 89 | ## Helper to resolve paths to vendored files 90 | def vendored(name, *path) 91 | Util.vendor(name, *path) 92 | end 93 | 94 | ## Helper to resolve absolute paths relative to this `File`. 95 | ## Only works for `File`s with valid filesystem source attributes! 
96 | def relative(*path) 97 | Pathname.new(source).join(*(['..', path].flatten)).expand_path 98 | end 99 | 100 | attribute :build_name, :required => true 101 | attribute :build_number 102 | attribute :build_url 103 | 104 | attribute :description 105 | attribute :version 106 | 107 | collection :policy do 108 | attribute :path, :relative => true 109 | attribute :json, :relative => true 110 | end 111 | 112 | ## 113 | # Enable/disable auto-versioning features 114 | ## 115 | namespace :autoversion do 116 | attribute :create_tags 117 | attribute :search_tags 118 | end 119 | 120 | ## 121 | # Local resource paths 122 | ## 123 | namespace :local do 124 | attribute :cookbook_path 125 | attribute :data_bag_path 126 | attribute :environment_path 127 | end 128 | 129 | namespace :chef do 130 | attribute :log_level 131 | attribute :staging_directory 132 | attribute :version 133 | end 134 | 135 | namespace :berkshelf do 136 | attribute :solver 137 | end 138 | 139 | ## 140 | # Cookbook build options 141 | ## 142 | namespace :cookbook do 143 | attribute :path 144 | attribute :berkshelf_config 145 | 146 | attribute :sources, :type => :list, :singular => :add_source 147 | attribute :metadata 148 | 149 | collection :depends do 150 | attribute :version 151 | 152 | attribute :git 153 | attribute :github 154 | attribute :branch 155 | attribute :tag 156 | attribute :ref 157 | attribute :rel 158 | 159 | attribute :path, :relative => true 160 | end 161 | end 162 | 163 | ## 164 | # AWS configurations 165 | ## 166 | namespace :aws do 167 | attribute :region 168 | attribute :access_key 169 | attribute :secret_key 170 | end 171 | 172 | collection :profile do 173 | attribute :tags, :type => :hash 174 | attribute :log_level 175 | 176 | ## 177 | # Sync'd artifacts 178 | ## 179 | collection :artifact do 180 | attribute :path, :relative => true 181 | attribute :destination 182 | end 183 | 184 | ## 185 | # Chef configurations 186 | ## 187 | namespace :chef do 188 | attribute :run_list, :type => :list, :singular => :run_list_item 189 | attribute :environment 190 | attribute :node_attrs, :type => :hash 191 | attribute :binary_env 192 | end 193 | 194 | collection :provisioner do 195 | attribute :inline, :type => :list 196 | attribute :environment_vars, :type => :list 197 | end 198 | 199 | ## 200 | # Packerfile 201 | # 202 | # This currently supports the AWS/EC2 and Docker builders. 
203 | ## 204 | namespace :packer do 205 | collection :build do 206 | attribute :type 207 | 208 | ## Docker-specific attributes 209 | # Required 210 | attribute :image 211 | # One (and only one) of the following is required 212 | attribute :commit 213 | attribute :discard 214 | attribute :export_path 215 | 216 | # Optional attributes 217 | attribute :ami_block_device_mappings, :type => :list 218 | attribute :author 219 | attribute :aws_access_key 220 | attribute :aws_secret_key 221 | attribute :aws_token 222 | attribute :changes, :type => :list 223 | attribute :ecr_login 224 | attribute :launch_block_device_mappings, :type => :list 225 | attribute :login 226 | attribute :login_email 227 | attribute :login_username 228 | attribute :login_password 229 | attribute :login_server 230 | attribute :message 231 | attribute :privileged 232 | attribute :pull 233 | attribute :run_command, :type => :list 234 | attribute :volumes, :type => :hash 235 | 236 | ## EC2 Placement and Virtualization parameters 237 | attribute :region 238 | attribute :availability_zone 239 | attribute :vpc_id 240 | attribute :subnet_id 241 | 242 | attribute :instance_type 243 | attribute :ami_virtualization_type 244 | attribute :enhanced_networking 245 | attribute :security_group_ids, :type => :list, :singular => :security_group_id 246 | attribute :iam_instance_profile 247 | attribute :encrypt_boot 248 | 249 | attribute :source_ami 250 | attribute :user_data 251 | attribute :user_data_file 252 | 253 | attribute :windows_password_timeout 254 | 255 | ## Access parameters 256 | attribute :ssh_username 257 | attribute :ssh_timeout 258 | attribute :ssh_keypair_name 259 | attribute :ssh_private_key_file 260 | attribute :ssh_private_ip 261 | attribute :temporary_key_pair_name 262 | 263 | attribute :ami_name 264 | attribute :ami_description 265 | attribute :ami_users, :type => :list 266 | attribute :ami_regions, :type => :list 267 | 268 | # Tagging 269 | attribute :run_tags, :type => :hash 270 | attribute :run_volume_tags, :type => :hash 271 | 272 | ## Assumable role for tagging AMIs in remote accounts 273 | attribute :tagging_role 274 | end 275 | 276 | attribute :post_processors, :type => :list, :flatten => false 277 | end 278 | 279 | ## 280 | # Vagrantfile 281 | ## 282 | namespace :vagrant do 283 | namespace :local do 284 | attribute :provider 285 | attribute :box 286 | attribute :box_url 287 | 288 | attribute :cpus 289 | attribute :memory 290 | end 291 | 292 | collection :port do 293 | attribute :host 294 | attribute :guest 295 | end 296 | 297 | collection :sync do 298 | attribute :path, :relative => true 299 | attribute :destination 300 | end 301 | 302 | namespace :ec2 do 303 | attribute :provider 304 | attribute :box 305 | attribute :box_url 306 | 307 | attribute :region 308 | attribute :availability_zone 309 | attribute :subnet_id 310 | attribute :private_ip_address 311 | attribute :tags, :type => :hash 312 | 313 | attribute :instance_type 314 | attribute :security_groups, :type => :list 315 | attribute :iam_instance_profile_arn 316 | 317 | attribute :source_ami 318 | attribute :user_data 319 | 320 | attribute :ssh_username 321 | attribute :ssh_timeout 322 | attribute :keypair_name 323 | attribute :private_key_path 324 | 325 | attribute :associate_public_ip 326 | attribute :ssh_host_attribute 327 | attribute :instance_ready_timeout 328 | attribute :instance_check_interval 329 | end 330 | end 331 | end 332 | 333 | ## 334 | # Configure resources that must be fetched for a build 335 | ## 336 | collection :vendor do 337 | attribute 
:path, :relative => true 338 | 339 | attribute :git 340 | attribute :github 341 | attribute :remote 342 | attribute :branch 343 | attribute :tag 344 | attribute :ref 345 | attribute :rel 346 | end 347 | 348 | ## 349 | # Cleaner Parameters 350 | ## 351 | namespace :cleaner do 352 | attribute :commit 353 | attribute :force 354 | attribute :filters, Hash 355 | attribute :group_by, :type => :list 356 | attribute :sort_by 357 | attribute :keep 358 | 359 | namespace :limits do 360 | attribute :images 361 | attribute :launch_configs 362 | attribute :snapshots 363 | attribute :volumes 364 | end 365 | end 366 | 367 | ## 368 | # Generator Options 369 | ## 370 | namespace :generator do 371 | collection :project do 372 | namespace :builderator do 373 | attribute :version 374 | end 375 | 376 | namespace :ruby do 377 | attribute :version 378 | end 379 | 380 | namespace :vagrant do 381 | attribute :install 382 | attribute :version 383 | 384 | collection :plugin do 385 | attribute :version 386 | end 387 | end 388 | 389 | collection :resource do 390 | attribute :path, :type => :list 391 | attribute :action 392 | attribute :template 393 | end 394 | end 395 | end 396 | 397 | ## 398 | # Option to disable cleanup of build resources 399 | ## 400 | attribute :cleanup 401 | 402 | private 403 | 404 | ## Helper to generate unique, predictable keys for caching 405 | def cache_key(query) 406 | query.keys.sort.map { |k| "#{k}:#{query[k]}" }.join('|') 407 | end 408 | end 409 | end 410 | end 411 | 412 | # rubocop:enable Metrics/ClassLength 413 | -------------------------------------------------------------------------------- /spec/version_spec.rb: -------------------------------------------------------------------------------- 1 | require_relative './spec_helper' 2 | 3 | require 'json' 4 | require 'builderator/control/version' 5 | 6 | # rubocop:disable Metrics/ClassLength 7 | 8 | module Builderator 9 | module Control 10 | # :nodoc: 11 | class Version 12 | ## Test stub to load from an included JSON document 13 | module Test 14 | extend SCM 15 | 16 | def self.supported? 17 | true 18 | end 19 | 20 | def self._history 21 | JSON.parse( 22 | IO.read(::File.expand_path('../data/history.json', __FILE__)), 23 | :symbolize_names => true) 24 | end 25 | end 26 | 27 | ## Test stub with no history 28 | module NoHistory 29 | extend SCM 30 | 31 | def self.supported? 32 | true 33 | end 34 | 35 | def self._history 36 | [] 37 | end 38 | end 39 | 40 | ## Disable the Git provider 41 | module Git 42 | extend SCM 43 | 44 | def self.supported? 45 | false 46 | end 47 | end 48 | 49 | RSpec.describe Builderator::Control::Version do 50 | before(:context) do 51 | SCM.unregister(Test) 52 | SCM.register(NoHistory) 53 | end 54 | 55 | after(:context) do 56 | SCM.unregister(NoHistory) 57 | SCM.register(Test) 58 | end 59 | 60 | context 'current' do 61 | it 'provides an error message if no version is found' do 62 | version_file = Util.relative_path('VERSION') 63 | version_file.delete if version_file.exist? 
64 | 65 | expect(SCM.tags.last).to be_nil 66 | expect { Version.current }.to raise_error RuntimeError 67 | end 68 | 69 | it 'falls back to VERSION file if no tags are found' do 70 | Util.relative_path('VERSION').write('1.2.3') 71 | expect(Version.current).to be == Version.from_string('1.2.3') 72 | end 73 | end 74 | end 75 | 76 | SCM.register(Test) 77 | 78 | RSpec.describe Builderator::Control::Version do 79 | context 'loading, parsing, and ordering of commits and tags' do 80 | it 'loads history from a provider' do 81 | expect(SCM.history).to be_a Array 82 | expect(SCM.history).to_not be_empty 83 | expect(SCM.history).to all be_a SCM::Commit 84 | end 85 | 86 | context 'parses semver strings correctly' do 87 | it 'parses a.b.c versions correctly' do 88 | version = Version.from_string('1.2.3') 89 | 90 | expect(version.major).to be == 1 91 | expect(version.minor).to be == 2 92 | expect(version.patch).to be == 3 93 | 94 | expect(version.is_prerelease).to be false 95 | expect(version.prerelease_name).to be_nil 96 | expect(version.prerelease_iteration).to be_nil 97 | expect(version.build).to be_nil 98 | end 99 | 100 | it 'parses pre-release versions correctly' do 101 | version = Version.from_string('1.2.3-pre.42') 102 | 103 | expect(version.major).to be == 1 104 | expect(version.minor).to be == 2 105 | expect(version.patch).to be == 3 106 | expect(version.is_prerelease).to be true 107 | expect(version.prerelease_name).to be == 'pre' 108 | expect(version.prerelease_iteration).to be == 42 109 | expect(version.build).to be_nil 110 | end 111 | 112 | it 'parses build versions correctly' do 113 | version = Version.from_string('1.2.3+build.9') 114 | 115 | expect(version.major).to be == 1 116 | expect(version.minor).to be == 2 117 | expect(version.patch).to be == 3 118 | expect(version.is_prerelease).to be false 119 | expect(version.prerelease_name).to be_nil 120 | expect(version.prerelease_iteration).to be_nil 121 | expect(version.build).to be == 9 122 | end 123 | 124 | it 'parses the complete spec' do 125 | version = Version.from_string('1.2.3-yolo.42+build.9') 126 | 127 | expect(version.major).to be == 1 128 | expect(version.minor).to be == 2 129 | expect(version.patch).to be == 3 130 | expect(version.is_prerelease).to be true 131 | expect(version.prerelease_name).to be == 'yolo' 132 | expect(version.prerelease_iteration).to be == 42 133 | expect(version.build).to be == 9 134 | end 135 | 136 | it 'does not fail on invalid specs' do 137 | expect(Version.from_string('1.2.lizard-alpha.42+build.9')).to be_nil 138 | expect(Version.from_string('1.2.3-alpha.42+taco.9')).to be_nil 139 | expect(Version.from_string('1.2.3-alpha.guacamole+build.9')).to be_nil 140 | expect(Version.from_string('1.2.3-alpha.42+build.beef')).to be_nil 141 | expect(Version.from_string('1.2.dog')).to be_nil 142 | expect(Version.from_string('1.cat.3')).to be_nil 143 | expect(Version.from_string('cow.2.3')).to be_nil 144 | end 145 | end 146 | 147 | it 'generates Version objects from commits' do 148 | expect(SCM.tags).to be_a Array 149 | expect(SCM.tags).to_not be_empty 150 | expect(SCM.tags).to all be_a Version 151 | end 152 | 153 | it 'finds the current version of the module' do 154 | expect(SCM.tags).to all be <= Version.current 155 | end 156 | end 157 | 158 | context 'bumping versions' do 159 | it 'attaches the correct SCM ref to new versions' do 160 | previous = Version.current 161 | 162 | Version.bump(:major) 163 | expect(Version.current.ref).to eq SCM.history.first.id 164 | expect(Version.current).to_not equal(previous) 165 | end 
166 | 167 | it 'fails when an invalid step is passed to #bump' do 168 | expect { Version.bump(:lizard) }.to raise_error RuntimeError 169 | end 170 | 171 | context 'bump build' do 172 | it 'adds, increments, and resets build numbers' do 173 | expect(Version.current.build).to be_nil 174 | previous = Version.current 175 | 176 | Version.bump(:build) 177 | expect(Version.current).to_not equal(previous) 178 | expect(Version.current.build).to be == 0 179 | 180 | Version.bump(:build) 181 | expect(Version.current.build).to be == 1 182 | 183 | Version.bump(:patch) 184 | expect(Version.current.build).to be_nil 185 | end 186 | end 187 | 188 | context 'bump prerelease and release' do 189 | it 'creates and increments pre-releases from previous releases' do 190 | expect(Version.current.is_prerelease).to be false 191 | previous = Version.current 192 | 193 | Version.bump(:prerelease) 194 | expect(Version.current.patch).to be == (previous.patch + 1) 195 | expect(Version.current.is_prerelease).to be true 196 | expect(Version.current.prerelease_name).to be == 'alpha' 197 | expect(Version.current.prerelease_iteration).to be == 0 198 | 199 | Version.bump(:prerelease) 200 | expect(Version.current.prerelease_name).to be == 'alpha' 201 | expect(Version.current.prerelease_iteration).to be == 1 202 | end 203 | 204 | it 'creates and increments pre-releases with a specified name' do 205 | expect(Version.current.is_prerelease).to be true 206 | 207 | Version.bump(:prerelease, 'beta') 208 | expect(Version.current.prerelease_name).to be == 'beta' 209 | expect(Version.current.prerelease_iteration).to be == 0 210 | 211 | Version.bump(:prerelease) 212 | expect(Version.current.prerelease_name).to be == 'beta' 213 | expect(Version.current.prerelease_iteration).to be == 1 214 | 215 | Version.bump(:prerelease, 'beta') 216 | expect(Version.current.prerelease_name).to be == 'beta' 217 | expect(Version.current.prerelease_iteration).to be == 2 218 | end 219 | 220 | it 'removes pre-release parameters for a release' do 221 | expect(Version.current.is_prerelease).to be true 222 | previous = Version.current 223 | 224 | Version.bump(:release) 225 | expect(Version.current.patch).to be == previous.patch 226 | expect(Version.current.is_prerelease).to be false 227 | expect(Version.current.prerelease_name).to be_nil 228 | expect(Version.current.prerelease_iteration).to be_nil 229 | end 230 | end 231 | 232 | context 'bump major, minor, and patch-prerelease' do 233 | it 'creates a new patch version for a patch-prerelease' do 234 | Version.bump(:prerelease) 235 | previous = Version.current 236 | expect(Version.current.is_prerelease).to be true 237 | 238 | Version.bump('patch-prerelease') 239 | expect(Version.current.is_prerelease).to be true 240 | expect(Version.current.patch).to be == (previous.patch + 1) 241 | end 242 | 243 | it 'creates a new minor version for a minor-prerelease' do 244 | previous = Version.current 245 | expect(Version.current.is_prerelease).to be true 246 | 247 | Version.bump('minor-prerelease') 248 | expect(Version.current.is_prerelease).to be true 249 | expect(Version.current.patch).to be == 0 250 | expect(Version.current.minor).to be == (previous.minor + 1) 251 | end 252 | 253 | it 'creates a new major version for a major-prerelease' do 254 | previous = Version.current 255 | expect(Version.current.is_prerelease).to be true 256 | 257 | Version.bump('major-prerelease') 258 | expect(Version.current.is_prerelease).to be true 259 | expect(Version.current.patch).to be == 0 260 | expect(Version.current.minor).to be == 0 261 | 
expect(Version.current.major).to be == (previous.major + 1) 262 | end 263 | end 264 | 265 | context 'bump major, minor, and patch' do 266 | it 'creates a new patch version and resets build and prerelease parameters' do 267 | Version.bump(:prerelease) 268 | Version.bump(:build) 269 | previous = Version.current 270 | 271 | expect(Version.current.is_prerelease).to be true 272 | expect(Version.current.build).to_not be_nil 273 | 274 | Version.bump(:patch) 275 | 276 | expect(Version.current.is_prerelease).to be false 277 | expect(Version.current.build).to be_nil 278 | expect(Version.current.patch).to be == (previous.patch + 1) 279 | end 280 | 281 | it 'creates a new minor version and resets lower-precedence parameters' do 282 | Version.bump(:prerelease) 283 | Version.bump(:build) 284 | previous = Version.current 285 | 286 | expect(Version.current.is_prerelease).to be true 287 | expect(Version.current.build).to_not be_nil 288 | expect(Version.current.patch).to_not be == 0 289 | 290 | Version.bump(:minor) 291 | 292 | expect(Version.current.is_prerelease).to be false 293 | expect(Version.current.build).to be_nil 294 | expect(Version.current.patch).to be == 0 295 | expect(Version.current.minor).to be == (previous.minor + 1) 296 | end 297 | 298 | it 'creates a new major version and resets lower-precedence parameters' do 299 | Version.bump(:prerelease) 300 | Version.bump(:build) 301 | previous = Version.current 302 | 303 | expect(Version.current.is_prerelease).to be true 304 | expect(Version.current.build).to_not be_nil 305 | expect(Version.current.patch).to_not be == 0 306 | expect(Version.current.minor).to_not be == 0 307 | 308 | Version.bump(:major) 309 | 310 | expect(Version.current.is_prerelease).to be false 311 | expect(Version.current.build).to be_nil 312 | expect(Version.current.patch).to be == 0 313 | expect(Version.current.minor).to be == 0 314 | expect(Version.current.major).to be == (previous.major + 1) 315 | end 316 | end 317 | end 318 | end 319 | end 320 | end 321 | end 322 | # rubocop:enable Metrics/ClassLength 323 | --------------------------------------------------------------------------------