├── .dockerignore ├── .github ├── rvu │ └── labels.yaml └── workflows │ ├── push.yaml │ └── tag.yaml ├── .gitignore ├── .rubocop.yml ├── .ruby-version ├── Dockerfile ├── Gemfile ├── LICENSE ├── README.md ├── Rakefile ├── bin └── terrafying ├── default.nix ├── lib ├── terrafying.rb └── terrafying │ ├── aws.rb │ ├── cli.rb │ ├── dynamodb.rb │ ├── dynamodb │ ├── config.rb │ ├── named_lock.rb │ └── state.rb │ ├── generator.rb │ ├── lock.rb │ ├── state.rb │ ├── util.rb │ └── version.rb ├── repl ├── spec └── terrafying │ └── generator_spec.rb └── terrafying.gemspec /.dockerignore: -------------------------------------------------------------------------------- 1 | .drone.yml 2 | .git 3 | .gitignore 4 | -------------------------------------------------------------------------------- /.github/rvu/labels.yaml: -------------------------------------------------------------------------------- 1 | service.rvu.co.uk/owner: airship 2 | -------------------------------------------------------------------------------- /.github/workflows/push.yaml: -------------------------------------------------------------------------------- 1 | name: push 2 | 3 | on: 4 | push: 5 | branches: 6 | - '**' 7 | 8 | permissions: 9 | contents: read 10 | id-token: write 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: ruby/setup-ruby@v1 18 | with: 19 | bundler-cache: true 20 | ruby-version: 3.2.2 21 | - run: bundle install 22 | - run: rake spec 23 | 24 | build: 25 | needs: test 26 | runs-on: ubuntu-latest 27 | steps: 28 | - uses: actions/checkout@v4 29 | - uses: ruby/setup-ruby@v1 30 | with: 31 | bundler-cache: true 32 | ruby-version: 3.2.2 33 | - run: rake version 34 | - run: rake build 35 | - uses: actions/upload-artifact@v4 36 | with: 37 | name: pkg 38 | path: pkg/ 39 | 40 | docker: 41 | needs: build 42 | runs-on: ubuntu-latest 43 | env: 44 | RUBYGEMS_API_KEY: ${{ secrets.RUBYGEMS_API_KEY }} 45 | steps: 46 | - uses: actions/checkout@v4 47 | - name: Login to Quay.io 48 | uses: docker/login-action@v3 49 | with: 50 | registry: quay.io 51 | username: ${{ secrets.QUAY_USERNAME }} 52 | password: ${{ secrets.QUAY_PASSWORD }} 53 | - uses: actions/download-artifact@v4 54 | with: 55 | name: pkg 56 | path: pkg/ 57 | - id: meta 58 | uses: docker/metadata-action@v5 59 | with: 60 | images: quay.io/uswitch/terrafying 61 | tags: type=sha,prefix=,format=long 62 | - uses: docker/build-push-action@v6 63 | with: 64 | context: . 
65 | labels: ${{ steps.meta.outputs.labels }} 66 | push: true 67 | tags: ${{ steps.meta.outputs.tags }} 68 | -------------------------------------------------------------------------------- /.github/workflows/tag.yaml: -------------------------------------------------------------------------------- 1 | name: tag 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | permissions: 9 | contents: read 10 | id-token: write 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | env: 16 | GHA_TERRAFYING_VERSION: ${{ github.ref_name }} 17 | steps: 18 | - uses: actions/checkout@v4 19 | - uses: ruby/setup-ruby@v1 20 | with: 21 | bundler-cache: true 22 | ruby-version: 3.2.2 23 | - run: rake version 24 | - run: rake build 25 | - uses: actions/upload-artifact@v4 26 | with: 27 | name: pkg 28 | path: | 29 | pkg/ 30 | lib/ 31 | 32 | push: 33 | needs: build 34 | env: 35 | RUBYGEMS_API_KEY: ${{ secrets.RUBYGEMS_API_KEY }} 36 | GHA_TERRAFYING_VERSION: ${{ github.ref_name }} 37 | runs-on: ubuntu-latest 38 | steps: 39 | - uses: actions/checkout@v4 40 | - uses: ruby/setup-ruby@v1 41 | with: 42 | bundler-cache: true 43 | ruby-version: 3.2.2 44 | - uses: actions/download-artifact@v4 45 | with: 46 | name: pkg 47 | path: ./ 48 | - run: rake push 49 | 50 | docker: 51 | needs: build 52 | runs-on: ubuntu-latest 53 | env: 54 | RUBYGEMS_API_KEY: ${{ secrets.RUBYGEMS_API_KEY }} 55 | steps: 56 | - uses: actions/checkout@v4 57 | - name: Login to Quay.io 58 | uses: docker/login-action@v3 59 | with: 60 | registry: quay.io 61 | username: ${{ secrets.QUAY_USERNAME }} 62 | password: ${{ secrets.QUAY_PASSWORD }} 63 | - uses: actions/download-artifact@v4 64 | with: 65 | name: pkg 66 | path: ./ 67 | - id: meta 68 | uses: docker/metadata-action@v4 69 | with: 70 | images: quay.io/uswitch/terrafying 71 | tags: type=semver,pattern={{version}} 72 | - uses: docker/build-push-action@v4 73 | with: 74 | context: . 
75 | labels: ${{ steps.meta.outputs.labels }} 76 | push: true 77 | tags: ${{ steps.meta.outputs.tags }} 78 | build-args: "TERRAFYING_VERSION=${{ github.ref_name }}" 79 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .gemconfig 2 | .nix-gems 3 | .idea 4 | Gemfile.lock 5 | 6 | /.bundle/ 7 | /.yardoc 8 | /_yardoc/ 9 | /coverage/ 10 | /doc/ 11 | /pkg/ 12 | /spec/reports/ 13 | /tmp/ 14 | 15 | # rspec failure tracking 16 | .rspec_status 17 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | AllCops: 2 | TargetRubyVersion: 3.2 3 | -------------------------------------------------------------------------------- /.ruby-version: -------------------------------------------------------------------------------- 1 | 3.2.2 2 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ruby:3.2.2-alpine3.18 2 | 3 | ARG TERRAFYING_VERSION=0.0.0 4 | 5 | RUN wget -O terraform.zip https://releases.hashicorp.com/terraform/0.11.14/terraform_0.11.14_linux_amd64.zip \ 6 | && unzip terraform.zip \ 7 | && install -m 755 terraform /usr/bin/terraform \ 8 | && install -d ${HOME}/.terraform.d/plugins/linux_amd64 \ 9 | && rm terraform terraform.zip 10 | 11 | COPY pkg /tmp 12 | 13 | RUN apk add --update --no-cache --virtual .terra-builddeps build-base ruby-dev \ 14 | && apk add --update --no-cache --virtual .terra-rundeps git bash \ 15 | && gem install /tmp/terrafying-${TERRAFYING_VERSION}.gem \ 16 | && install -d /terra \ 17 | && apk del .terra-builddeps \ 18 | && rm -rf /var/cache/apk/* 19 | 20 | WORKDIR /terra 21 | 22 | ENTRYPOINT [] 23 | CMD ["/bin/bash"] 24 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 'https://rubygems.org' 4 | 5 | gemspec 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2016 uSwitch Limited 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # terrafying 2 | 3 | A small ruby dsl for [terraform](https://www.terraform.io), based on [terrafied](https://github.com/thattommyhall/terrafied). 
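Specifications are plain Ruby evaluated by `Terrafying::Generator` and rendered to Terraform's JSON configuration format. As a minimal sketch (the resource and attribute values here are illustrative only, not taken from this repo):

```ruby
require 'terrafying'

Terrafying::Generator.generate {
  aws_sns_topic "alerts", {
    name: "alerts"
  }
}
```

`./bin/terrafying json <file>` prints the Terraform JSON generated from a specification like this; a fuller, security-group example follows under "Creating a specification".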
4 | 5 | ## Setup 6 | 7 | - Ruby 3.2 (see `.ruby-version`) 8 | - Terraform with the right version: https://www.terraform.io/downloads.html 9 | - OSX: `brew install terraform` 10 | - `bundle install` 11 | 12 | ### Terraform version 13 | 14 | `Terrafying::CLI_VERSION` checks for the correct version of terraform. Because Hashicorp releases new versions frequently, sometimes with breaking changes or bugs, we have pinned the terraform version. As terraform continues to be updated we will try to keep this pinned version in step with the latest stable release. 15 | 16 | ## Usage 17 | 18 | The `terrafying` command is in `bin`: 19 | 20 | ``` 21 | $ ./bin/terrafying 22 | Commands: 23 | terrafying apply PATH # Apply changes to resources 24 | terrafying destroy PATH # Destroy resources 25 | terrafying graph PATH # Show execution graph 26 | terrafying help [COMMAND] # Describe available commands or one specific command 27 | terrafying import PATH ADDR ID # Import existing infrastructure into your Terraform state 28 | terrafying json PATH # Show terraform JSON 29 | terrafying list PATH # List resources defined 30 | terrafying plan PATH # Show execution plan 31 | terrafying show-state PATH # Show state 32 | terrafying use-local-state PATH # Migrate to using local state storage 33 | terrafying use-remote-state PATH # Migrate to using remote state storage 34 | 35 | Options: 36 | [--no-lock], [--no-no-lock] 37 | [--keep], [--no-keep] 38 | [--target=TARGET] 39 | 40 | ``` 41 | 42 | ### Creating a specification 43 | 44 | Create a Ruby file and declare resources in it as you wish. 45 | 46 | For example, `example/main.rb`: 47 | 48 | ```ruby 49 | Terrafying::Generator.generate { 50 | aws_security_group "example_group", { 51 | name: "example_group", 52 | description: "Allow all inbound traffic to port 80", 53 | vpc_id: 'vpc-ec0c118e', 54 | 55 | ingress: { 56 | from_port: 80, 57 | to_port: 80, 58 | protocol: "tcp", 59 | cidr_blocks: ["0.0.0.0/0"], 60 | } 61 | } 62 | } 63 | ``` 64 | 65 | ### Showing changes 66 | 67 | Run `./bin/terrafying plan example/main.rb`: 68 | 69 | ``` 70 | $ ./bin/terrafying plan example/main.rb 71 | Refreshing Terraform state prior to plan... 72 | 73 | 74 | The Terraform execution plan has been generated and is shown below. 75 | Resources are shown in alphabetical order for quick scanning. Green resources 76 | will be created (or destroyed and then created if an existing resource 77 | exists), yellow resources are being changed in-place, and red resources 78 | will be destroyed. 79 | 80 | Note: You didn't specify an "-out" parameter to save this plan, so when 81 | "apply" is called, Terraform can't guarantee this is what will execute. 82 | 83 | + aws_security_group.example_group 84 | description: "" => "Allow all inbound traffic to port 80" 85 | egress.#: "" => "" 86 | ingress.#: "" => "1" 87 | ingress.2214680975.cidr_blocks.#: "" => "1" 88 | ingress.2214680975.cidr_blocks.0: "" => "0.0.0.0/0" 89 | ingress.2214680975.from_port: "" => "80" 90 | ingress.2214680975.protocol: "" => "tcp" 91 | ingress.2214680975.security_groups.#: "" => "0" 92 | ingress.2214680975.self: "" => "0" 93 | ingress.2214680975.to_port: "" => "80" 94 | name: "" => "example_group" 95 | owner_id: "" => "" 96 | vpc_id: "" => "vpc-ec0c118e" 97 | 98 | Plan: 1 to add, 0 to change, 0 to destroy. 99 | ``` 100 | 101 | 102 | ### Applying changes 103 | 104 | Run `./bin/terrafying apply example/main.rb`: 105 | 106 | ``` 107 | $ ./bin/terrafying apply example/main.rb 108 | aws_security_group.example_group: Creating...
109 | description: "" => "Allow all inbound traffic to port 80" 110 | egress.#: "" => "" 111 | ingress.#: "" => "1" 112 | ingress.2214680975.cidr_blocks.#: "" => "1" 113 | ingress.2214680975.cidr_blocks.0: "" => "0.0.0.0/0" 114 | ingress.2214680975.from_port: "" => "80" 115 | ingress.2214680975.protocol: "" => "tcp" 116 | ingress.2214680975.security_groups.#: "" => "0" 117 | ingress.2214680975.self: "" => "0" 118 | ingress.2214680975.to_port: "" => "80" 119 | name: "" => "example_group" 120 | owner_id: "" => "" 121 | vpc_id: "" => "vpc-ec0c118e" 122 | aws_security_group.example_group: Creation complete 123 | 124 | Apply complete! Resources: 1 added, 0 changed, 0 destroyed. 125 | 126 | The state of your infrastructure has been saved to the path 127 | below. This state is required to modify and destroy your 128 | infrastructure, so keep it safe. To inspect the complete state 129 | use the `terraform show` command. 130 | 131 | State path: terraform.tfstate 132 | ``` 133 | 134 | ## Locking 135 | 136 | To prevent concurrent changes to infrastructure any operations that 137 | mutate resources (apply/delete) are done under a distributed lock. 138 | 139 | If an operation fails completely or partially, you may be left still 140 | holding the lock. This is intended behaviour and will allow you to fix 141 | your specifications and continue applying your changes until you reach a 142 | consistent state. 143 | 144 | If you or someone else has a lock that you want to re-acquire or steal 145 | (if you deem it safe to do so) you can use the `-f` flag. 146 | 147 | ``` 148 | $ ./bin/terrafying apply -f example/main.rb 149 | ``` 150 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'bundler/gem_tasks' 4 | require 'yaml' 5 | 6 | rubygems_api_key = ENV['RUBYGEMS_API_KEY'] 7 | terrafying_version = Terrafying::VERSION 8 | 9 | begin 10 | require 'rspec/core/rake_task' 11 | 12 | RSpec::Core::RakeTask.new(:spec) 13 | task default: :spec 14 | rescue LoadError 15 | # no rspec available 16 | end 17 | 18 | desc 'Push gem to rubygems' 19 | task :push do 20 | gem_config = { rubygems_api_key: rubygems_api_key }.to_yaml 21 | File.open('.gemconfig', 'w') { |file| file.write(gem_config) } 22 | sh("gem push --config-file .gemconfig pkg/terrafying-#{terrafying_version}.gem") 23 | end 24 | 25 | desc 'Update the version for terrafying to DRONE_TAG. 
(0.0.0 if DRONE_TAG not set)' 26 | task :version do 27 | ver = ENV['GHA_TERRAFYING_VERSION'] || '0.0.0' 28 | version_file = 'lib/terrafying/version.rb' 29 | content = File.read(version_file).gsub(/0\.0\.0/, ver) 30 | File.open(version_file, 'w') { |file| file.puts content } 31 | end 32 | 33 | task push: :build 34 | -------------------------------------------------------------------------------- /bin/terrafying: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require 'terrafying' 5 | 6 | Terrafying::Cli.start(ARGV) 7 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | let 2 | pkgs = import (builtins.fetchTarball { 3 | url = "https://github.com/NixOS/nixpkgs/archive/893186f4fd4c1e697b2bc38aa8f268f236d5ea02.tar.gz"; 4 | }) {}; 5 | 6 | stdenv = pkgs.stdenv; 7 | ruby = (pkgs.ruby_2_3_0.override { cursesSupport = true; }); 8 | 9 | unstable = builtins.tryEval ( import {} ); 10 | terraform = if unstable.success then 11 | import (unstable.value.path + "/pkgs/applications/networking/cluster/terraform/") { 12 | stdenv = unstable.value.stdenv; 13 | lib = unstable.value.lib; 14 | buildGoPackage = unstable.value.buildGoPackage; 15 | fetchFromGitHub = unstable.value.fetchFromGitHub; 16 | } 17 | else 18 | []; 19 | in stdenv.mkDerivation rec { 20 | name = "terrafying"; 21 | 22 | buildInputs = [ 23 | ruby 24 | pkgs.libxml2 25 | pkgs.libxslt 26 | pkgs.zlib 27 | pkgs.bzip2 28 | pkgs.openssl 29 | pkgs.readline 30 | terraform 31 | ] ++ (pkgs.lib.optionals (!stdenv.isDarwin) [ pkgs.glibc ]); 32 | 33 | src = ./.; 34 | 35 | installPhase = '' 36 | mkdir -p $out 37 | cp -R $src/* $out 38 | 39 | for i in `ls $out/bin`; do 40 | chmod +x $out/bin 41 | done 42 | ''; 43 | 44 | shellHook = '' 45 | export PKG_CONFIG_PATH=${pkgs.libxml2}/lib/pkgconfig:${pkgs.libxslt}/lib/pkgconfig:${pkgs.zlib}/lib/pkgconfig 46 | 47 | # gems 48 | mkdir -p .nix-gems 49 | export GEM_HOME=$PWD/.nix-gems 50 | export GEM_PATH=$GEM_HOME 51 | export PATH=$GEM_HOME/bin:$PATH 52 | ''; 53 | } 54 | 55 | -------------------------------------------------------------------------------- /lib/terrafying.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'fileutils' 4 | require 'logger' 5 | require 'pathname' 6 | require 'securerandom' 7 | require 'tempfile' 8 | 9 | require 'terrafying/aws' 10 | require 'terrafying/cli' 11 | require 'terrafying/generator' 12 | require 'terrafying/lock' 13 | require 'terrafying/version' 14 | require 'terrafying/state' 15 | 16 | module Terrafying 17 | class Config 18 | attr_reader :path, :scope 19 | 20 | def initialize(path, options) 21 | @path = File.expand_path(path) 22 | @options = options 23 | @scope = options[:scope] || scope_for_path(@path) 24 | 25 | warn "Scope: #{@scope}" 26 | 27 | load(path) 28 | end 29 | 30 | def list 31 | Terrafying::Generator.resource_names 32 | end 33 | 34 | def json 35 | Terrafying::Generator.pretty_generate 36 | end 37 | 38 | def plan 39 | exit_code = 1 40 | with_config do 41 | with_state(mode: :read) do 42 | exit_code = exec_with_optional_target 'plan' 43 | end 44 | end 45 | exit_code 46 | end 47 | 48 | def graph 49 | exit_code = 1 50 | with_config do 51 | with_state(mode: :read) do 52 | exit_code = exec_with_optional_target 'graph' 53 | end 54 | end 55 | exit_code 56 | end 57 | 58 | def 
validate 59 | exit_code = 1 60 | with_config do 61 | with_state(mode: :read) do 62 | exit_code = exec_with_optional_target 'validate' 63 | end 64 | end 65 | exit_code 66 | end 67 | 68 | def apply 69 | exit_code = 1 70 | with_config do 71 | with_lock do 72 | with_state(mode: :update) do 73 | exit_code = exec_with_optional_target "apply -auto-approve -backup=- #{@dir}" 74 | end 75 | end 76 | end 77 | exit_code 78 | end 79 | 80 | def destroy 81 | exit_code = 1 82 | with_config do 83 | with_lock do 84 | with_state(mode: :update) do 85 | exit_code = stream_command("terraform destroy -backup=- #{@dir}") 86 | end 87 | end 88 | end 89 | exit_code 90 | end 91 | 92 | def show_state 93 | puts(State.store(self).get) 94 | end 95 | 96 | def use_remote_state 97 | with_lock do 98 | local = State.local(self) 99 | state = local.get 100 | State.remote(self).put(state) if state 101 | local.delete 102 | end 103 | end 104 | 105 | def use_local_state 106 | with_lock do 107 | remote = State.remote(self) 108 | state = remote.get 109 | State.local(self).put(state) if state 110 | end 111 | end 112 | 113 | def import(addr, id) 114 | exit_code = 1 115 | with_config do 116 | with_lock do 117 | with_state(mode: :update) do 118 | exit_code = exec_with_optional_target "import -backup=- #{@dir} #{addr} #{id}" 119 | end 120 | end 121 | end 122 | exit_code 123 | end 124 | 125 | private 126 | 127 | def lock_timeout 128 | "-lock-timeout=#{@options[:lock_timeout]}" if @options[:lock_timeout] 129 | end 130 | 131 | def targets 132 | @options[:target].split(',').map { |target| "-target=#{target}" }.join(' ') if @options[:target] 133 | end 134 | 135 | def exec_with_optional_target(command, *args) 136 | exec_with_args(command, targets, lock_timeout, *args) 137 | end 138 | 139 | def exec_with_args(command, *args) 140 | stream_command("terraform #{command} #{args.join(' ')}") 141 | end 142 | 143 | def with_config(&block) 144 | abort('***** ERROR: You must have terraform installed to run this gem *****') unless terraform_installed? 145 | check_version 146 | name = File.basename(@path, '.*') 147 | dir = File.join(git_toplevel, 'tmp', SecureRandom.uuid) 148 | terraform_files = File.join(git_toplevel, '.terraform/') 149 | unless Dir.exist?(terraform_files) 150 | abort("***** ERROR: No .terraform directory found. Please run 'terraform init' to install plugins *****") 151 | end 152 | FileUtils.mkdir_p(dir) 153 | output_path = File.join(dir, name + '.tf.json') 154 | FileUtils.cp_r(terraform_files, dir) 155 | Dir.chdir(dir) do 156 | File.write(output_path, Terrafying::Generator.pretty_generate) 157 | yield block 158 | ensure 159 | FileUtils.rm_rf(dir) unless @options[:keep] 160 | end 161 | end 162 | 163 | def with_lock(&block) 164 | lock_id = nil 165 | begin 166 | lock = if @options[:no_lock] 167 | Locks.noop 168 | else 169 | Locks.dynamodb(scope) 170 | end 171 | 172 | lock_id = if @options[:force] 173 | lock.steal 174 | else 175 | lock.acquire 176 | end 177 | yield block 178 | 179 | # If block raises any exception we will still hold on to lock 180 | # after process exits. This is actually what we want as 181 | # terraform may have succeeded in updating some resources, but 182 | # not others so we need to manually get into a consistent 183 | # state and then re-run. 
184 | lock.release(lock_id) 185 | end 186 | end 187 | 188 | def with_state(opts, &block) 189 | return yield(block) unless @options[:dynamodb] 190 | 191 | store = State.store(self) 192 | 193 | begin 194 | state = store.get 195 | File.write(State::STATE_FILENAME, state) if state 196 | rescue StandardError => e 197 | raise "Error retrieving state for config #{self}: #{e}" 198 | end 199 | 200 | yield block 201 | 202 | begin 203 | store.put(IO.read(State::STATE_FILENAME)) if opts[:mode] == :update 204 | rescue StandardError => e 205 | raise "Error updating state for config #{self}: #{e}" 206 | end 207 | end 208 | 209 | def scope_for_path(_path) 210 | top_level_path = Pathname.new(git_toplevel) 211 | Pathname.new(@path).relative_path_from(top_level_path).to_s 212 | end 213 | 214 | def git_toplevel 215 | @top_level ||= begin 216 | top_level = `git rev-parse --show-toplevel` 217 | raise "Unable to find .git directory top level for '#{@path}'" if top_level.empty? 218 | 219 | File.expand_path(top_level.chomp) 220 | end 221 | end 222 | 223 | def check_version 224 | if terraform_version != Terrafying::CLI_VERSION 225 | abort("***** ERROR: You must have v#{Terrafying::CLI_VERSION} of terraform installed to run any command (you are running v#{terraform_version}) *****") 226 | end 227 | end 228 | 229 | def terraform_installed? 230 | which('terraform') 231 | end 232 | 233 | def terraform_version 234 | `terraform -v`.split("\n").first.split('v').last 235 | end 236 | 237 | def stream_command(cmd) 238 | IO.popen(cmd) do |io| 239 | while (line = io.gets) 240 | puts line.gsub('\n', "\n").gsub('\\"', '"') 241 | end 242 | end 243 | $CHILD_STATUS.exitstatus 244 | end 245 | 246 | # Cross-platform way of finding an executable in the $PATH. 247 | # 248 | # which('ruby') #=> /usr/bin/ruby 249 | def which(cmd) 250 | exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : [''] 251 | ENV['PATH'].split(File::PATH_SEPARATOR).each do |path| 252 | exts.each do |ext| 253 | exe = File.join(path, "#{cmd}#{ext}") 254 | return exe if File.executable?(exe) && !File.directory?(exe) 255 | end 256 | end 257 | nil 258 | end 259 | end 260 | end 261 | -------------------------------------------------------------------------------- /lib/terrafying/aws.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'aws-sdk-autoscaling' 4 | require 'aws-sdk-ec2' 5 | require 'aws-sdk-elasticloadbalancingv2' 6 | require 'aws-sdk-route53' 7 | require 'aws-sdk-s3' 8 | require 'aws-sdk-sts' 9 | require 'aws-sdk-pricing' 10 | require 'aws-sdk-kafka' 11 | require 'json' 12 | 13 | Aws.use_bundled_cert! 
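# The Ops class defined below wraps several AWS SDK clients and memoises lookups,
# so a specification can resolve existing infrastructure (VPCs, subnets, AMIs,
# security groups, hosted zones, ...) by name or tag while the Terraform JSON is
# being generated. A rough usage sketch from inside a spec -- the resource names
# here are purely illustrative:
#
#   Terrafying::Generator.generate {
#     vpc = aws.vpc('my-vpc')                # VPC looked up by its Name tag
#     resource :aws_instance, 'example', {
#       ami: aws.ami('base-image'),          # newest AMI whose name starts with 'base-image'
#       subnet_id: aws.subnet('private-a'),  # subnet id looked up by Name tag
#       vpc_security_group_ids: aws.security_groups_in_vpc(vpc.vpc_id, 'default')
#     }
#   }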
14 | 15 | module Terrafying 16 | module Aws 17 | class Ops 18 | attr_reader :region 19 | 20 | def initialize(region) 21 | half_jitter = lambda { |c| 22 | sleep_time = 0.5 * (2**c.retries) 23 | Kernel.sleep(Kernel.rand((sleep_time / 2)..sleep_time)) 24 | } 25 | 26 | ::Aws.config.update( 27 | region: region, 28 | retry_limit: 7, 29 | retry_backoff: half_jitter 30 | ) 31 | 32 | @autoscaling_client = ::Aws::AutoScaling::Client.new 33 | @ec2_resource = ::Aws::EC2::Resource.new 34 | @ec2_client = ::Aws::EC2::Client.new 35 | @elb_client = ::Aws::ElasticLoadBalancingV2::Client.new 36 | @route53_client = ::Aws::Route53::Client.new 37 | @s3_client = ::Aws::S3::Client.new 38 | @sts_client = ::Aws::STS::Client.new 39 | @pricing_client = ::Aws::Pricing::Client.new(region: 'us-east-1') # no AWS Pricing endpoint in Europe 40 | @msk_client = ::Aws::Kafka::Client.new 41 | @region = region 42 | end 43 | 44 | def account_id 45 | @account_id_cache ||= @sts_client.get_caller_identity.account 46 | end 47 | 48 | def all_regions 49 | @all_regions ||= @ec2_client.describe_regions.regions.map(&:region_name) 50 | end 51 | 52 | def all_security_groups 53 | @all_security_groups ||= @ec2_resource.security_groups.to_a 54 | end 55 | 56 | def security_group(name) 57 | @security_groups ||= {} 58 | @security_groups[name] ||= 59 | begin 60 | warn "Looking up id of security group '#{name}'" 61 | groups = all_security_groups.select { |g| g.group_name == name }.take(2) 62 | if groups.count == 1 63 | groups.first.id 64 | elsif groups.count < 1 65 | raise "No security group with name '#{name}' was found." 66 | elsif groups.count > 1 67 | raise "More than one security group with name '#{name}' found: " + groups.join(', ') 68 | end 69 | end 70 | end 71 | 72 | def security_group_in_vpc(vpc_id, name) 73 | @security_groups_in_vpc ||= {} 74 | @security_groups_in_vpc[vpc_id + name] ||= 75 | begin 76 | warn "Looking up id of security group '#{name}'" 77 | groups = all_security_groups.select { |g| g.vpc_id == vpc_id && g.group_name == name }.take(2) 78 | if groups.count == 1 79 | groups.first.id 80 | elsif groups.count < 1 81 | raise "No security group with name '#{name}' was found." 82 | elsif groups.count > 1 83 | raise "More than one security group with name '#{name}' found: " + groups.join(', ') 84 | end 85 | end 86 | end 87 | 88 | def security_group_by_tags(tags) 89 | @security_groups_by_tags ||= {} 90 | @security_groups_by_tags[tags] ||= 91 | begin 92 | groups = all_security_groups.select { |g| g.tags.any? { |t| t.key == tags.keys && t.value == tags.values } }.take(2) 93 | if groups.count == 1 94 | groups.first.id 95 | elsif groups.count < 1 96 | raise "No security group with tags '#{tags}' was found." 97 | elsif groups.count > 1 98 | raise "More than one security group with tags '#{tags}' found: " + groups.join(', ') 99 | end 100 | end 101 | end 102 | 103 | def instance_profile(name) 104 | @instance_profiles ||= {} 105 | @instance_profiles[name] ||= 106 | begin 107 | resource = ::Aws::IAM::Resource.new 108 | warn "Looking up id of instance profile '#{name}'" 109 | # unfortunately amazon don't let us filter for profiles using 110 | # a name filter, for now we have enumerate and filter manually 111 | coll = resource.instance_profiles 112 | profiles = [] 113 | profiles = coll.select { |p| p.instance_profile_name =~ /#{name}/ } 114 | 115 | if profiles.count == 1 116 | profiles.first.instance_profile_id 117 | elsif profiles.count < 1 118 | raise "No instance profile with name '#{name}' was found." 
119 | elsif profiles.count > 1 120 | raise "More than one instance profile with name '#{name}' found: " + profiles.join(', ') 121 | end 122 | end 123 | end 124 | 125 | def route_table_for_subnet(subnet_id) 126 | @route_table_for_subnet ||= {} 127 | @route_table_for_subnet[subnet_id] ||= 128 | begin 129 | resp = @ec2_client.describe_route_tables( 130 | filters: [ 131 | { name: 'association.subnet-id', values: [subnet_id] } 132 | ] 133 | ) 134 | 135 | route_tables = resp.route_tables 136 | 137 | if route_tables.count == 1 138 | route_tables.first 139 | elsif route_tables.count < 1 140 | raise "No route table for subnet '#{subnet_id}' was found." 141 | elsif route_tables.count > 1 142 | raise "More than one route table for subnet '#{subnet_id}' found: " + route_tables.join(', ') 143 | end 144 | end 145 | end 146 | 147 | def route_table_for_vpc(vpc_id) 148 | @route_table_for_vpc ||= {} 149 | @route_table_for_vpc[vpc_id] ||= 150 | begin 151 | resp = @ec2_client.describe_route_tables( 152 | filters: [ 153 | { name: 'association.main', values: ['true'] }, 154 | { name: 'vpc-id', values: [vpc_id] } 155 | ] 156 | ) 157 | 158 | route_tables = resp.route_tables 159 | 160 | if route_tables.count == 1 161 | route_tables.first 162 | elsif route_tables.count < 1 163 | raise "No route table for vpc '#{vpc_id}' was found." 164 | elsif route_tables.count > 1 165 | raise "More than one route table for vpc '#{vpc_id}' found: " + route_tables.join(', ') 166 | end 167 | end 168 | end 169 | 170 | def nat_gateways_for_vpc(vpc_id) 171 | @nat_gateways_for_vpc ||= {} 172 | @nat_gateways_for_vpc[vpc_id] ||= 173 | begin 174 | resp = @ec2_client.describe_nat_gateways( 175 | filter: [ 176 | { name: 'vpc-id', values: [vpc_id] } 177 | ] 178 | ) 179 | 180 | nat_gateways = resp.nat_gateways 181 | 182 | if nat_gateways.count >= 1 183 | nat_gateways 184 | elsif nat_gateways.count < 1 185 | raise "No nat-gateways for vpc #{vpc_id} were found" 186 | end 187 | end 188 | end 189 | 190 | def security_groups(*names) 191 | names.map { |n| security_group(n) } 192 | end 193 | 194 | def security_groups_in_vpc(vpc_id, *names) 195 | names.map { |n| security_group_in_vpc(vpc_id, n) } 196 | end 197 | 198 | def subnet(name) 199 | @subnets ||= {} 200 | @subnets[name] ||= 201 | begin 202 | warn "Looking up id of subnet '#{name}'" 203 | subnets = @ec2_resource.subnets( 204 | filters: [ 205 | { 206 | name: 'tag:Name', 207 | values: [name] 208 | } 209 | ] 210 | ).limit(2) 211 | if subnets.count == 1 212 | subnets.first.id 213 | elsif subnets.count < 1 214 | raise "No subnet with name '#{name}' was found." 215 | elsif subnets.count > 1 216 | raise "More than one subnet with this name '#{name}' found: " + subnets.join(', ') 217 | end 218 | end 219 | end 220 | 221 | def subnet_by_id(id) 222 | @subnets_by_id ||= {} 223 | @subnets_by_id[id] ||= 224 | begin 225 | resp = @ec2_client.describe_subnets( 226 | subnet_ids: [id] 227 | ) 228 | subnets = resp.subnets 229 | if subnets.count == 1 230 | subnets.first 231 | elsif subnets.count < 1 232 | raise "No subnet with id '#{id}' was found."
233 | elsif subnets.count > 1 234 | raise "More than one subnet with this id '#{id}' found : " + subnets.join(', ') 235 | end 236 | end 237 | end 238 | 239 | def subnets(*names) 240 | names.map { |n| subnet(n) } 241 | end 242 | 243 | def subnets_for_vpc(vpc_id) 244 | @subnets_for_vpc ||= {} 245 | @subnets_for_vpc[vpc_id] ||= 246 | begin 247 | resp = @ec2_client.describe_subnets( 248 | filters: [ 249 | { name: 'vpc-id', values: [vpc_id] } 250 | ] 251 | ) 252 | 253 | subnets = resp.subnets 254 | 255 | if subnets.count >= 1 256 | subnets 257 | elsif subnets.count < 1 258 | raise "No subnets found for '#{vpc_id}'." 259 | end 260 | end 261 | end 262 | 263 | def ami(name, owners = ['self']) 264 | @ami ||= {} 265 | @ami[name] ||= 266 | begin 267 | warn "looking for an image with prefix '#{name}'" 268 | resp = @ec2_client.describe_images(owners: owners) 269 | raise 'no images were found' if resp.images.count < 1 270 | 271 | m = resp.images.select { |a| /^#{name}/.match(a.name) } 272 | raise "no image with name '#{name}' was found" if m.count == 0 273 | 274 | m.sort { |x, y| y.creation_date <=> x.creation_date }.shift.image_id 275 | end 276 | end 277 | 278 | def availability_zones 279 | @availability_zones ||= 280 | begin 281 | warn 'looking for AZs in the current region' 282 | resp = @ec2_client.describe_availability_zones({}) 283 | resp.availability_zones.map(&:zone_name) 284 | end 285 | end 286 | 287 | def vpc(name) 288 | @vpcs ||= {} 289 | @vpcs[name] ||= 290 | begin 291 | warn "looking for a VPC with name '#{name}'" 292 | resp = @ec2_client.describe_vpcs({}) 293 | matching_vpcs = resp.vpcs.select do |vpc| 294 | name_tag = vpc.tags.select { |tag| tag.key == 'Name' }.first 295 | name_tag && name_tag.value == name 296 | end 297 | if matching_vpcs.count == 1 298 | matching_vpcs.first 299 | elsif matching_vpcs.count < 1 300 | raise "No VPC with name '#{name}' was found." 301 | elsif matching_vpcs.count > 1 302 | raise "More than one VPC with name '#{name}' was found: " + matching_vpcs.join(', ') 303 | end 304 | end 305 | end 306 | 307 | def route_table(name) 308 | @route_tables ||= {} 309 | @route_tables[name] ||= 310 | begin 311 | warn "looking for a route table with name '#{name}'" 312 | route_tables = @ec2_client.describe_route_tables( 313 | filters: [ 314 | { 315 | name: 'tag:Name', 316 | values: [name] 317 | } 318 | ] 319 | ).route_tables 320 | if route_tables.count == 1 321 | route_tables.first.route_table_id 322 | elsif route_tables.count < 1 323 | raise "No route table with name '#{name}' was found." 324 | elsif route_tables.count > 1 325 | raise "More than one route table with name '#{name}' was found: " + route_tables.join(', ') 326 | end 327 | end 328 | end 329 | 330 | def elastic_ip(alloc_id) 331 | @ips ||= {} 332 | @ips[alloc_id] ||= 333 | begin 334 | warn "looking for an elastic ip with allocation_id '#{alloc_id}'" 335 | ips = @ec2_client.describe_addresses( 336 | filters: [ 337 | { 338 | name: 'allocation-id', 339 | values: [alloc_id] 340 | } 341 | ] 342 | ).addresses 343 | if ips.count == 1 344 | ips.first 345 | elsif ips.count < 1 346 | raise "No elastic ip with allocation_id '#{alloc_id}' was found." 
347 | elsif ips.count > 1 348 | raise "More than one elastic ip with allocation_id '#{alloc_id}' was found: " + ips.join(', ') 349 | end 350 | end 351 | end 352 | 353 | def hosted_zone(fqdn) 354 | @hosted_zones ||= {} 355 | @hosted_zones[fqdn] ||= 356 | begin 357 | warn "looking for a hosted zone with fqdn '#{fqdn}'" 358 | hosted_zones = @route53_client.list_hosted_zones_by_name(dns_name: fqdn).hosted_zones.select do |zone| 359 | zone.name == "#{fqdn}." && !zone.config.private_zone 360 | end 361 | if hosted_zones.count == 1 362 | hosted_zones.first 363 | elsif hosted_zones.count < 1 364 | raise "No hosted zone with fqdn '#{fqdn}' was found." 365 | elsif hosted_zones.count > 1 366 | raise "More than one hosted zone with name '#{fqdn}' was found: " + hosted_zones.join(', ') 367 | end 368 | end 369 | end 370 | 371 | def hosted_zone_by_tag(tag) 372 | @hosted_zones ||= {} 373 | @hosted_zones[tag] ||= 374 | begin 375 | warn "looking for a hosted zone with tag '#{tag}'" 376 | @aws_hosted_zones ||= @route53_client.list_hosted_zones.hosted_zones.map do |zone| 377 | { 378 | zone: zone, 379 | tags: @route53_client.list_tags_for_resource(resource_type: 'hostedzone', resource_id: zone.id.split('/')[2]).resource_tag_set.tags 380 | } 381 | end 382 | 383 | hosted_zones = @aws_hosted_zones.select do |z| 384 | z[:tags].any? do |aws_tag| 385 | tag.any? { |k, v| aws_tag.key = String(k) && aws_tag.value == v } 386 | end 387 | end 388 | 389 | if hosted_zones.count == 1 390 | hosted_zones.first[:zone] 391 | elsif hosted_zones.count < 1 392 | raise "No hosted zone with tag '#{tag}' was found." 393 | elsif hosted_zones.count > 1 394 | raise "More than one hosted zone with tag '#{tag}' was found: " + hosted_zones.join(', ') 395 | end 396 | end 397 | end 398 | 399 | def s3_object(bucket, key) 400 | @s3_objects ||= {} 401 | @s3_objects["#{bucket}-#{key}"] ||= 402 | begin 403 | resp = @s3_client.get_object(bucket: bucket, key: key) 404 | resp.body.read 405 | end 406 | end 407 | 408 | def list_objects(bucket) 409 | @list_objects ||= {} 410 | @list_objects[bucket] ||= 411 | begin 412 | resp = @s3_client.list_objects_v2(bucket: bucket) 413 | resp.contents 414 | end 415 | end 416 | 417 | def endpoint_service_by_name(service_name) 418 | @endpoint_service ||= {} 419 | @endpoint_service[service_name] ||= 420 | begin 421 | resp = @ec2_client.describe_vpc_endpoint_service_configurations( 422 | filters: [ 423 | { 424 | name: 'service-name', 425 | values: [service_name] 426 | } 427 | ] 428 | ) 429 | 430 | endpoint_services = resp.service_configurations 431 | if endpoint_services.count == 1 432 | endpoint_services.first 433 | elsif endpoint_services.count < 1 434 | raise "No endpoint service with name '#{service_name}' was found." 435 | elsif endpoint_services.count > 1 436 | raise "More than one endpoint service with name '#{service_name}' was found: " + endpoint_services.join(', ') 437 | end 438 | end 439 | end 440 | 441 | def endpoint_service_by_lb_arn(arn) 442 | @endpoint_services_by_lb_arn ||= {} 443 | @endpoint_services_by_lb_arn[arn] ||= 444 | begin 445 | resp = @ec2_client.describe_vpc_endpoint_service_configurations 446 | 447 | services = resp.service_configurations.select do |service| 448 | service.network_load_balancer_arns.include?(arn) 449 | end 450 | 451 | if services.count == 1 452 | services.first 453 | elsif services.count < 1 454 | raise "No endpoint service with lb arn '#{arn}' was found." 
455 | elsif services.count > 1 456 | raise "More than one endpoint service with lb arn '#{arn}' was found: " + services.join(', ') 457 | end 458 | end 459 | end 460 | 461 | def lb_by_name(name) 462 | @lbs ||= {} 463 | @lbs[name] ||= 464 | begin 465 | load_balancers = @elb_client.describe_load_balancers(names: [name]).load_balancers 466 | 467 | if load_balancers.count == 1 468 | load_balancers.first 469 | elsif load_balancers.count < 1 470 | raise "No load balancer with name '#{name}' was found." 471 | elsif load_balancers.count > 1 472 | raise "More than one load balancer with name '#{name}' was found: " + load_balancers.join(', ') 473 | end 474 | end 475 | end 476 | 477 | def target_groups_by_lb(arn) 478 | @target_groups ||= {} 479 | @target_groups[arn] ||= 480 | begin 481 | resp = @elb_client.describe_target_groups( 482 | load_balancer_arn: arn 483 | ) 484 | 485 | resp.target_groups 486 | end 487 | end 488 | 489 | def asgs_by_tags(expectedTags = {}) 490 | asgs = [] 491 | next_token = nil 492 | 493 | loop do 494 | resp = @autoscaling_client.describe_auto_scaling_groups(next_token: next_token) 495 | 496 | asgs += resp.auto_scaling_groups.select do |asg| 497 | matches = asg.tags.select do |tag| 498 | expectedTags[tag.key.to_sym] == tag.value || 499 | expectedTags[tag.key] == tag.value 500 | end 501 | 502 | matches.count == expectedTags.count 503 | end 504 | 505 | if resp.next_token 506 | next_token = resp.next_token 507 | else 508 | break 509 | end 510 | end 511 | 512 | asgs 513 | end 514 | 515 | def products(products_filter, _region = 'us-east-1') 516 | next_token = nil 517 | Enumerator.new do |y| 518 | loop do 519 | resp = @pricing_client.get_products(products_filter.merge(next_token: next_token)) 520 | resp.price_list.each do |product| 521 | y << product 522 | end 523 | next_token = resp.next_token 524 | break if next_token.nil? 525 | end 526 | end 527 | end 528 | 529 | def instance_type_vcpu_count(instance_type, location = 'EU (Ireland)') 530 | products_filter = { 531 | service_code: 'AmazonEC2', 532 | filters: [ 533 | { field: 'operatingSystem', type: 'TERM_MATCH', value: 'Linux' }, 534 | { field: 'tenancy', type: 'TERM_MATCH', value: 'Shared' }, 535 | { field: 'instanceType', type: 'TERM_MATCH', value: instance_type }, 536 | { field: 'location', type: 'TERM_MATCH', value: location }, 537 | { field: 'preInstalledSw', type: 'TERM_MATCH', value: 'NA' } 538 | ], 539 | format_version: 'aws_v1' 540 | } 541 | 542 | products(products_filter).each do |product| 543 | vcpu = JSON.parse(product)['product']['attributes']['vcpu'] 544 | return vcpu.to_i if vcpu 545 | end 546 | end 547 | 548 | def msk_brokers(cluster_arn) 549 | @brokers ||= {} 550 | @brokers[cluster_arn] ||= begin 551 | resp = @msk_client.get_bootstrap_brokers(cluster_arn: cluster_arn) 552 | brokers = resp.bootstrap_broker_string_tls.split(',') 553 | 554 | raise "No brokers found for cluster with arn: \"#{cluster_arn}\"'" if brokers.empty? 
555 | 556 | brokers 557 | end 558 | end 559 | end 560 | end 561 | end 562 | -------------------------------------------------------------------------------- /lib/terrafying/cli.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'thor' 4 | 5 | module Terrafying 6 | class Cli < Thor 7 | class_option :lock_timeout, type: :string, default: nil 8 | class_option :no_lock, type: :boolean, default: false 9 | class_option :keep, type: :boolean, default: false 10 | class_option :target, type: :string, default: nil 11 | class_option :scope, type: :string, default: nil 12 | class_option :dynamodb, type: :boolean, default: true 13 | 14 | desc 'list PATH', 'List resources defined' 15 | def list(path) 16 | puts "Defined resources:\n\n" 17 | Config.new(path, options).list.each do |name| 18 | puts name.to_s 19 | end 20 | end 21 | 22 | desc 'plan PATH', 'Show execution plan' 23 | def plan(path) 24 | exit Config.new(path, options).plan 25 | end 26 | 27 | desc 'graph PATH', 'Show execution graph' 28 | def graph(path) 29 | exit Config.new(path, options).graph 30 | end 31 | 32 | desc 'validate PATH', 'Validate the generated Terraform' 33 | def validate(path) 34 | exit Config.new(path, options).validate 35 | end 36 | 37 | desc 'apply PATH', 'Apply changes to resources' 38 | option :force, aliases: ['f'], type: :boolean, desc: 'Forcefully remove any pending locks' 39 | def apply(path) 40 | exit Config.new(path, options).apply 41 | end 42 | 43 | desc 'destroy PATH', 'Destroy resources' 44 | option :force, aliases: ['f'], type: :boolean, desc: 'Forcefully remove any pending locks' 45 | def destroy(path) 46 | exit Config.new(path, options).destroy 47 | end 48 | 49 | desc 'json PATH', 'Show terraform JSON' 50 | def json(path) 51 | puts(Config.new(path, options).json) 52 | end 53 | 54 | desc 'show-state PATH', 'Show state' 55 | def show_state(path) 56 | puts(Config.new(path, options).show_state) 57 | end 58 | 59 | desc 'use-remote-state PATH', 'Migrate to using remote state storage' 60 | def use_remote_state(path) 61 | puts(Config.new(path, options).use_remote_state) 62 | end 63 | 64 | desc 'use-local-state PATH', 'Migrate to using local state storage' 65 | def use_local_state(path) 66 | puts(Config.new(path, options).use_local_state) 67 | end 68 | 69 | desc 'import PATH ADDR ID', 'Import existing infrastructure into your Terraform state' 70 | def import(path, addr, id) 71 | exit Config.new(path, options).import(addr, id) 72 | end 73 | 74 | def method_missing(*args) 75 | json(args[0].to_s) 76 | end 77 | end 78 | end 79 | -------------------------------------------------------------------------------- /lib/terrafying/dynamodb.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'aws-sdk-dynamodb' 4 | require 'json' 5 | require 'securerandom' 6 | 7 | # oh rubby 8 | class ::Aws::DynamoDB::Client 9 | def ensure_table(table_spec, &block) 10 | retried = false 11 | begin 12 | yield block 13 | rescue ::Aws::DynamoDB::Errors::ResourceNotFoundException => e 14 | if !retried 15 | create_table(table_spec) 16 | retry 17 | else 18 | raise e 19 | end 20 | end 21 | end 22 | end 23 | 24 | module Terrafying 25 | module DynamoDb 26 | def self.client 27 | @@client ||= ::Aws::DynamoDB::Client.new( 28 | region: Terrafying::Context::REGION 29 | # endpoint: 'http://localhost:8000', 30 | ) 31 | end 32 | end 33 | end 34 | 
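# A rough sketch of how the ensure_table helper above is used by the lock and
# state stores in this gem (the table name and key mirror the defaults in
# dynamodb/config.rb and dynamodb/named_lock.rb): the block is attempted first,
# and the table is created lazily when the first ResourceNotFoundException is
# raised, after which the block is retried.
#
#   client = Terrafying::DynamoDb.client
#   table_spec = {
#     table_name: 'terrafying-state-lock',
#     attribute_definitions: [{ attribute_name: 'name', attribute_type: 'S' }],
#     key_schema: [{ attribute_name: 'name', key_type: 'HASH' }],
#     provisioned_throughput: { read_capacity_units: 1, write_capacity_units: 1 }
#   }
#   client.ensure_table(table_spec) do
#     client.get_item(table_name: 'terrafying-state-lock', key: { 'name' => 'some/scope' })
#   end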
-------------------------------------------------------------------------------- /lib/terrafying/dynamodb/config.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Terrafying 4 | module DynamoDb 5 | class Config 6 | attr_accessor :state_table, :lock_table 7 | 8 | def initialize 9 | @state_table = 'terrafying-state' 10 | @lock_table = 'terrafying-state-lock' 11 | end 12 | end 13 | 14 | def config 15 | @config ||= Config.new 16 | end 17 | module_function :config 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /lib/terrafying/dynamodb/named_lock.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'terrafying/dynamodb' 4 | require 'terrafying/dynamodb/config' 5 | 6 | module Terrafying 7 | module DynamoDb 8 | class NamedLock 9 | def initialize(table_name, name) 10 | @table_name = table_name 11 | @name = name 12 | @client = Terrafying::DynamoDb.client 13 | end 14 | 15 | def status 16 | @client.ensure_table(table) do 17 | resp = @client.get_item( 18 | table_name: @table_name, 19 | key: { 20 | 'name' => @name 21 | }, 22 | consistent_read: true 23 | ) 24 | if resp.item 25 | return { 26 | status: :locked, 27 | locked_at: resp.item['locked_at'], 28 | metadata: resp.item['metadata'] 29 | } 30 | else 31 | return { 32 | status: :unlocked 33 | } 34 | end 35 | end 36 | end 37 | 38 | def acquire 39 | @client.ensure_table(table) do 40 | lock_id = SecureRandom.uuid 41 | @client.update_item(acquire_request(lock_id)) 42 | return lock_id 43 | rescue ::Aws::DynamoDB::Errors::ConditionalCheckFailedException 44 | raise "Unable to acquire lock: #{status.inspect}" # TODO 45 | end 46 | end 47 | 48 | def steal 49 | @client.ensure_table(table) do 50 | lock_id = SecureRandom.uuid 51 | req = acquire_request(lock_id) 52 | req.delete(:condition_expression) 53 | @client.update_item(req) 54 | return lock_id 55 | rescue ::Aws::DynamoDB::Errors::ConditionalCheckFailedException 56 | raise "Unable to steal lock: #{status.inspect}" # TODO 57 | end 58 | end 59 | 60 | def release(lock_id) 61 | @client.ensure_table(table) do 62 | @client.delete_item( 63 | table_name: @table_name, 64 | key: { 65 | 'name' => @name 66 | }, 67 | return_values: 'NONE', 68 | condition_expression: 'lock_id = :lock_id', 69 | expression_attribute_values: { 70 | ':lock_id' => lock_id 71 | } 72 | ) 73 | nil 74 | rescue ::Aws::DynamoDB::Errors::ConditionalCheckFailedException 75 | raise "Unable to release lock: #{status.inspect}" # TODO 76 | end 77 | end 78 | 79 | private 80 | 81 | def acquire_request(lock_id) 82 | { 83 | table_name: @table_name, 84 | key: { 85 | 'name' => @name 86 | }, 87 | return_values: 'NONE', 88 | update_expression: 'SET lock_id = :lock_id, locked_at = :locked_at, metadata = :metadata', 89 | condition_expression: 'attribute_not_exists(lock_id)', 90 | expression_attribute_values: { 91 | ':lock_id' => lock_id, 92 | ':locked_at' => Time.now.to_s, 93 | ':metadata' => { 94 | 'owner' => "#{`git config user.name`.chomp} (#{`git config user.email`.chomp})" 95 | } 96 | } 97 | } 98 | end 99 | 100 | def table 101 | { 102 | table_name: @table_name, 103 | attribute_definitions: [ 104 | { 105 | attribute_name: 'name', 106 | attribute_type: 'S' 107 | } 108 | ], 109 | key_schema: [ 110 | { 111 | attribute_name: 'name', 112 | key_type: 'HASH' 113 | } 114 | ], 115 | provisioned_throughput: { 116 | read_capacity_units: 1, 117 | 
write_capacity_units: 1 118 | } 119 | } 120 | end 121 | end 122 | end 123 | end 124 | -------------------------------------------------------------------------------- /lib/terrafying/dynamodb/state.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'digest' 4 | require 'terrafying/dynamodb/config' 5 | 6 | module Terrafying 7 | module DynamoDb 8 | class StateStore 9 | def initialize(scope, _opts = {}) 10 | @scope = scope 11 | @client = Terrafying::DynamoDb.client 12 | @table_name = Terrafying::DynamoDb.config.state_table 13 | end 14 | 15 | def get 16 | @client.ensure_table(table) do 17 | resp = @client.query( 18 | table_name: @table_name, 19 | limit: 1, 20 | key_conditions: { 21 | 'scope' => { 22 | attribute_value_list: [@scope], 23 | comparison_operator: 'EQ' 24 | } 25 | }, 26 | scan_index_forward: false 27 | ) 28 | case resp.items.count 29 | when 0 then return nil 30 | when 1 then return resp.items.first['state'] 31 | else raise 'More than one item found when retrieving state. This is a bug and should never happen.' if resp.items.count != 1 32 | end 33 | end 34 | end 35 | 36 | def put(state) 37 | @client.ensure_table(table) do 38 | sha256 = Digest::SHA256.hexdigest(state) 39 | json = JSON.parse(state) 40 | @client.update_item( 41 | table_name: @table_name, 42 | key: { 43 | 'scope' => @scope, 44 | 'serial' => json['serial'].to_i 45 | }, 46 | return_values: 'NONE', 47 | update_expression: 'SET sha256 = :sha256, #state = :state', 48 | condition_expression: 'attribute_not_exists(serial) OR sha256 = :sha256', 49 | expression_attribute_names: { 50 | '#state' => 'state' 51 | }, 52 | expression_attribute_values: { 53 | ':sha256' => sha256, 54 | ':state' => state 55 | } 56 | ) 57 | end 58 | end 59 | 60 | def table 61 | { 62 | table_name: @table_name, 63 | attribute_definitions: [ 64 | { 65 | attribute_name: 'scope', 66 | attribute_type: 'S' 67 | }, 68 | { 69 | attribute_name: 'serial', 70 | attribute_type: 'N' 71 | } 72 | ], 73 | key_schema: [ 74 | { 75 | attribute_name: 'scope', 76 | key_type: 'HASH' 77 | }, 78 | { 79 | attribute_name: 'serial', 80 | key_type: 'RANGE' 81 | } 82 | 83 | ], 84 | provisioned_throughput: { 85 | read_capacity_units: 1, 86 | write_capacity_units: 1 87 | } 88 | } 89 | end 90 | end 91 | end 92 | end 93 | -------------------------------------------------------------------------------- /lib/terrafying/generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'json' 4 | require 'base64' 5 | require 'erb' 6 | require 'ostruct' 7 | require 'deep_merge' 8 | require 'terrafying/aws' 9 | 10 | module Terrafying 11 | ARG_PLACEHOLDER = 'ARG_PLACEHOLDER123' 12 | 13 | class Ref 14 | def fn_call(fn, *args) 15 | args = [ARG_PLACEHOLDER] if args.empty? 
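# (ARG_PLACEHOLDER marks where this ref's own rendered expression gets spliced
# into the argument list when the wrapped call is realised; see FnRef#realise below.)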
16 | FnRef.new(fn: fn, args: args, ref: self) 17 | end 18 | 19 | def downcase 20 | fn_call('lower') 21 | end 22 | 23 | def strip 24 | fn_call('trimspace') 25 | end 26 | 27 | def split(separator) 28 | fn_call('split', separator, ARG_PLACEHOLDER) 29 | end 30 | 31 | def slice(idx, length = 0) 32 | if length != 0 33 | fn_call('slice', ARG_PLACEHOLDER, idx, idx + length) 34 | else 35 | fn_call('element', ARG_PLACEHOLDER, idx) 36 | end 37 | end 38 | 39 | def realise 40 | '' 41 | end 42 | 43 | def to_s 44 | "${#{realise}}" 45 | end 46 | 47 | def to_str 48 | to_s 49 | end 50 | 51 | def <=>(other) 52 | to_s <=> other.to_s 53 | end 54 | 55 | def ==(other) 56 | to_s == other.to_s 57 | end 58 | 59 | def [](key) 60 | if key.is_a? Numeric 61 | IndexRef.new(ref: self, idx: key) 62 | else 63 | AttributeRef.new(ref: self, key: key) 64 | end 65 | end 66 | 67 | def []=(_k, _v) 68 | raise "You can't set a value this way" 69 | end 70 | end 71 | 72 | class RootRef < Ref 73 | def initialize( 74 | kind: :resource, 75 | type: '', 76 | name: 77 | ) 78 | @kind = kind 79 | @type = type 80 | @name = name 81 | end 82 | 83 | def realise 84 | type = [@type] 85 | type = [@kind, @type] if @kind != :resource 86 | 87 | (type + [@name]).reject(&:empty?).join('.') 88 | end 89 | 90 | def fn_call(fn, *args) 91 | if @kind == :resource 92 | self['id'].fn_call(fn, *args) 93 | else 94 | super 95 | end 96 | end 97 | 98 | def to_s 99 | if @kind == :resource 100 | "${#{realise}.id}" 101 | else 102 | super 103 | end 104 | end 105 | end 106 | 107 | class AttributeRef < Ref 108 | def initialize( 109 | ref:, 110 | key: 111 | ) 112 | @ref = ref 113 | @key = key 114 | end 115 | 116 | def realise 117 | "#{@ref.realise}.#{@key}" 118 | end 119 | end 120 | 121 | class IndexRef < Ref 122 | def initialize( 123 | ref:, 124 | idx: 125 | ) 126 | @ref = ref 127 | @idx = idx 128 | end 129 | 130 | def realise 131 | "#{@ref.realise}[#{@idx}]" 132 | end 133 | end 134 | 135 | class FnRef < Ref 136 | def initialize( 137 | ref:, 138 | fn:, 139 | args: [] 140 | ) 141 | @ref = ref 142 | @fn = fn 143 | @args = args 144 | end 145 | 146 | def realise 147 | ref = @ref.realise 148 | args = @args.map do |arg| 149 | if arg == ARG_PLACEHOLDER 150 | ref 151 | elsif arg.is_a? String 152 | "\"#{arg}\"" 153 | else 154 | arg 155 | end 156 | end.join(', ') 157 | 158 | "#{@fn}(#{args})" 159 | end 160 | end 161 | 162 | class Context 163 | REGION = ENV.fetch('AWS_REGION', 'eu-west-1') 164 | 165 | PROVIDER_DEFAULTS = { 166 | aws: { region: REGION } 167 | }.freeze 168 | 169 | def self.bundle(&block) 170 | ctx = Context.new 171 | ctx.instance_eval(&block) 172 | ctx 173 | end 174 | 175 | attr_reader :output 176 | 177 | def initialize 178 | @output = { 179 | 'resource' => {} 180 | } 181 | @children = [] 182 | end 183 | 184 | def aws 185 | @@aws ||= Terrafying::Aws::Ops.new REGION 186 | end 187 | 188 | def provider(name, spec) 189 | key = provider_key(name, spec) 190 | @providers ||= {} 191 | raise "Duplicate provider configuration detected for #{key}" if key_exists_spec_differs(key, name, spec) 192 | 193 | @providers[key] = { name.to_s => spec } 194 | @output['provider'] = @providers.values 195 | key 196 | end 197 | 198 | def provider_key(name, spec) 199 | [name, spec[:alias]].compact.join('.') 200 | end 201 | 202 | def required_provider(name, spec) 203 | @output['terraform'] ||= {} 204 | @output['terraform']['required_providers'] ||= {} 205 | raise "Duplicate required_provider configuration detected for #{name}" if @output['terraform']['required_providers'].key? 
name.to_s 206 | 207 | @output['terraform']['required_providers'][name.to_s] = spec 208 | end 209 | 210 | def required_version(version) 211 | @output['terraform'] ||= {} 212 | raise "required_version already configure" if @output['terraform']['required_version'] 213 | 214 | @output['terraform']['required_version'] = "#{version}" 215 | end 216 | 217 | def key_exists_spec_differs(key, name, spec) 218 | @providers.key?(key) && spec != @providers[key][name.to_s] 219 | end 220 | 221 | def local(name, value) 222 | @output['locals'] ||= {} 223 | 224 | raise "Local already exists #{name}" if @output['locals'].key? name.to_s 225 | 226 | @output['locals'][name.to_s] = value 227 | RootRef.new(kind: :local, name: name) 228 | end 229 | 230 | def var(name, spec) 231 | @output['variable'] ||= {} 232 | 233 | raise "Var already exists #{name}" if @output['variable'].key? name.to_s 234 | 235 | @output['variable'][name.to_s] = spec 236 | RootRef.new(kind: :var, name: name) 237 | end 238 | 239 | def data(type, name, spec) 240 | @output['data'] ||= {} 241 | @output['data'][type.to_s] ||= {} 242 | 243 | raise "Data already exists #{type}.#{name}" if @output['data'][type.to_s].key? name.to_s 244 | 245 | @output['data'][type.to_s][name.to_s] = spec 246 | RootRef.new(kind: :data, type: type, name: name) 247 | end 248 | 249 | def resource(type, name, attributes) 250 | @output['resource'][type.to_s] ||= {} 251 | 252 | raise "Resource already exists #{type}.#{name}" if @output['resource'][type.to_s].key? name.to_s 253 | 254 | @output['resource'][type.to_s][name.to_s] = attributes 255 | RootRef.new(kind: :resource, type: type, name: name) 256 | end 257 | 258 | def tf_module(name, spec) 259 | @output['module'] ||= {} 260 | 261 | raise "Module already exists #{name}" if @output['module'].key? 
name.to_s 262 | 263 | @output['module'][name.to_s] = spec 264 | 265 | RootRef.new(kind: :module, name: name) 266 | end 267 | 268 | def template(relative_path, params = {}) 269 | dir = caller_locations[0].path 270 | filename = File.join(File.dirname(dir), relative_path) 271 | erb = ERB.new(IO.read(filename)) 272 | erb.filename = filename 273 | erb.result(OpenStruct.new(params).instance_eval { binding }) 274 | end 275 | 276 | def output_with_children 277 | @children.inject(@output) { |out, c| out.deep_merge(c.output_with_children) } 278 | end 279 | 280 | def id_of(type, name) 281 | output_of(type, name, 'id') 282 | end 283 | 284 | def output_of(type, name, key) 285 | RootRef.new(kind: :resource, type: type, name: name)[key] 286 | end 287 | 288 | def pretty_generate 289 | JSON.pretty_generate(output_with_children) 290 | end 291 | 292 | def resource_names 293 | out = output_with_children 294 | ret = [] 295 | out['resource'].keys.each do |type| 296 | out['resource'][type].keys.each do |id| 297 | ret << "#{type}.#{id}" 298 | end 299 | end 300 | ret 301 | end 302 | 303 | def resources 304 | out = output_with_children 305 | ret = [] 306 | out['resource'].keys.each do |type| 307 | out['resource'][type].keys.each do |id| 308 | ret << "${#{type}.#{id}.id}" 309 | end 310 | end 311 | ret 312 | end 313 | 314 | def add!(*c) 315 | @children.push(*c) 316 | c[0] 317 | end 318 | 319 | def tf_safe(str) 320 | str.gsub(%r{[\.\s/\?]}, '-').gsub(%r{\*}, "star") 321 | end 322 | end 323 | 324 | class RootContext < Context 325 | def initialize 326 | super 327 | @providers = {} 328 | end 329 | 330 | def backend(name, spec) 331 | @output['terraform'] = { 332 | backend: { 333 | name => spec 334 | } 335 | } 336 | end 337 | 338 | def generate(&block) 339 | instance_eval(&block) 340 | end 341 | 342 | def method_missing(fn, *args) 343 | resource(fn, args.shift.to_s, args.first) 344 | end 345 | 346 | def output_with_children 347 | PROVIDER_DEFAULTS.each do |name, spec| 348 | unless key_exists_spec_differs(provider_key(name, spec), name, spec) 349 | provider(name, spec) 350 | end 351 | end 352 | 353 | super 354 | end 355 | end 356 | 357 | Generator = RootContext.new 358 | 359 | module DSL 360 | %w[ 361 | add! 
362 | aws 363 | local 364 | var 365 | backend 366 | provider 367 | resource 368 | data 369 | tf_module 370 | template 371 | tf_safe 372 | id_of 373 | output_of 374 | ].each do |name| 375 | define_method(name) do |*args| 376 | Generator.send(name, *args) 377 | end 378 | end 379 | end 380 | end 381 | -------------------------------------------------------------------------------- /lib/terrafying/lock.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'terrafying/dynamodb/named_lock' 4 | 5 | module Terrafying 6 | module Locks 7 | class NoOpLock 8 | def acquire 9 | '' 10 | end 11 | 12 | def steal 13 | '' 14 | end 15 | 16 | def release(lock_id); end 17 | end 18 | 19 | def self.noop 20 | NoOpLock.new 21 | end 22 | 23 | def self.dynamodb(scope) 24 | Terrafying::DynamoDb::NamedLock.new(Terrafying::DynamoDb.config.lock_table, scope) 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /lib/terrafying/state.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'terrafying/dynamodb/state' 4 | 5 | module Terrafying 6 | module State 7 | STATE_FILENAME = 'terraform.tfstate' 8 | 9 | def self.store(config) 10 | if LocalStateStore.has_local_state?(config) 11 | local(config) 12 | else 13 | remote(config) 14 | end 15 | end 16 | 17 | def self.local(config) 18 | LocalStateStore.new(config.path) 19 | end 20 | 21 | def self.remote(config) 22 | Terrafying::DynamoDb::StateStore.new(config.scope) 23 | end 24 | 25 | class LocalStateStore 26 | def initialize(path) 27 | @path = LocalStateStore.state_path(path) 28 | end 29 | 30 | def get 31 | IO.read(@path) 32 | end 33 | 34 | def put(state) 35 | IO.write(@path, state) 36 | end 37 | 38 | def delete 39 | File.delete(@path) 40 | end 41 | 42 | def self.has_local_state?(config) 43 | File.exist?(state_path(config.path)) 44 | end 45 | 46 | private 47 | 48 | def self.state_path(path) 49 | File.join(File.dirname(path), STATE_FILENAME) 50 | end 51 | end 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /lib/terrafying/util.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'yaml' 4 | 5 | def data_url_from_string(str) 6 | b64_contents = Base64.strict_encode64(str) 7 | "data:;base64,#{b64_contents}" 8 | end 9 | 10 | module Terrafying 11 | module Util 12 | def self.to_ignition(yaml) 13 | config = YAML.safe_load(yaml) 14 | 15 | if config.key?('storage') && config['storage'].key?('files') 16 | files = config['storage']['files'] 17 | config['storage']['files'] = files.each do |file| 18 | next unless file['contents'].is_a? 
String 19 | 20 | file['contents'] = { 21 | source: data_url_from_string(file['contents']) 22 | } 23 | end 24 | end 25 | 26 | JSON.generate(config) 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /lib/terrafying/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module Terrafying 4 | VERSION = '0.0.0' # will be inserted by Drone 5 | CLI_VERSION = '0.11.7' 6 | end 7 | -------------------------------------------------------------------------------- /repl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | require 'bundler/setup' 5 | require 'terrafying' 6 | 7 | require 'irb' 8 | IRB.start 9 | -------------------------------------------------------------------------------- /spec/terrafying/generator_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'terrafying/generator' 4 | 5 | RSpec.describe Terrafying::Ref do 6 | context 'to_s' do 7 | it 'should return an interpolated string' do 8 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 9 | 10 | expect(ref.to_s).to eq('${var.thingy}') 11 | end 12 | end 13 | 14 | context 'downcase' do 15 | it 'should wrap it in lower' do 16 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 17 | 18 | expect(ref.downcase.to_s).to eq('${lower(var.thingy)}') 19 | end 20 | end 21 | 22 | context 'strip' do 23 | it 'should wrap it in trimspace' do 24 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 25 | 26 | expect(ref.strip.to_s).to eq('${trimspace(var.thingy)}') 27 | end 28 | end 29 | 30 | context 'split' do 31 | it 'should wrap correctly' do 32 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 33 | expect(ref.split('/').to_s).to eq('${split("/", var.thingy)}') 34 | end 35 | 36 | it 'should stack correctly' do 37 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 38 | expect(ref.split('/')[0].downcase.to_s).to eq('${lower(split("/", var.thingy)[0])}') 39 | end 40 | end 41 | 42 | context 'slice' do 43 | it 'should call element by default' do 44 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 45 | expect(ref.slice(1).to_s).to eq('${element(var.thingy, 1)}') 46 | end 47 | 48 | it 'should call slice when length is non-zero' do 49 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 50 | expect(ref.slice(1, 2).to_s).to eq('${slice(var.thingy, 1, 3)}') 51 | end 52 | end 53 | 54 | context 'lookup' do 55 | it 'should do numbers and strings' do 56 | ref = Terrafying::RootRef.new(kind: :var, name: 'list') 57 | expect(ref[0]['name'].to_s).to eq('${var.list[0].name}') 58 | end 59 | end 60 | 61 | it 'should stack functions' do 62 | ref = Terrafying::RootRef.new(kind: :var, name: 'thingy') 63 | 64 | expect(ref.downcase.strip.to_s).to eq('${trimspace(lower(var.thingy))}') 65 | end 66 | 67 | it 'should be comparable' do 68 | refs = [ 69 | ref = Terrafying::RootRef.new(kind: :var, name: 'b'), 70 | ref = Terrafying::RootRef.new(kind: :var, name: 'a') 71 | ] 72 | 73 | expect(refs.min.to_s).to eq('${var.a}') 74 | end 75 | 76 | it 'implements equality' do 77 | a = Terrafying::RootRef.new(kind: :var, name: 'a') 78 | a2 = Terrafying::RootRef.new(kind: :var, name: 'a') 79 | b = Terrafying::RootRef.new(kind: :var, name: 'b') 80 | 81 | expect(a == a).to be true 82 | expect(a == a2).to be true 83 | 
expect(a == b).to be false 84 | end 85 | 86 | it 'lets us look up a var' do 87 | r = Terrafying::RootRef.new(kind: :resource, type: 'aws_wibble', name: 'foo') 88 | expect(r.to_s).to eq('${aws_wibble.foo.id}') 89 | r_thing = r['thing'] 90 | expect(r_thing.to_s).to eq('${aws_wibble.foo.thing}') 91 | r_thing_id = r_thing['id'] 92 | expect(r_thing_id.to_s).to eq('${aws_wibble.foo.thing.id}') 93 | end 94 | 95 | it 'lets us look up an output' do 96 | r = Terrafying::RootRef.new(kind: :module, name: 'wibble') 97 | r_thing = r['thing'] 98 | expect(r_thing.to_s).to eq('${module.wibble.thing}') 99 | end 100 | 101 | it 'lets us look up a var when called fn' do 102 | r = Terrafying::RootRef.new(kind: :resource, type: 'aws_wibble', name: 'foo') 103 | r_lower = r.downcase 104 | expect(r_lower.to_s).to eq('${lower(aws_wibble.foo.id)}') 105 | r_lower_wibble = r['wibble'].downcase 106 | expect(r_lower_wibble.to_s).to eq('${lower(aws_wibble.foo.wibble)}') 107 | end 108 | end 109 | 110 | RSpec.describe Terrafying::Context do 111 | context 'var' do 112 | it 'should output the right thing' do 113 | context = Terrafying::Context.new 114 | 115 | var = context.var :foo, type: 'string', default: 'asdf' 116 | 117 | expect(var.to_s).to eq('${var.foo}') 118 | end 119 | 120 | it 'should not be able to make two vars with same name' do 121 | context = Terrafying::Context.new 122 | 123 | context.var(:foo, {}) 124 | expect do 125 | context.var(:foo, {}) 126 | end.to raise_error(/foo/) 127 | end 128 | end 129 | 130 | context 'local' do 131 | it 'should output the right thing' do 132 | context = Terrafying::Context.new 133 | 134 | local = context.local :foo, 'wibble' 135 | 136 | expect(local.to_s).to eq('${local.foo}') 137 | end 138 | 139 | it 'should not be able to make two locals with same name' do 140 | context = Terrafying::Context.new 141 | 142 | context.local(:foo, {}) 143 | expect do 144 | context.local(:foo, {}) 145 | end.to raise_error(/foo/) 146 | end 147 | end 148 | 149 | context 'provider' do 150 | it 'should output a string' do 151 | context = Terrafying::Context.new 152 | 153 | provider = context.provider(:aws, {}) 154 | 155 | expect(provider).to eq('aws') 156 | end 157 | 158 | it 'should output a string with alias' do 159 | context = Terrafying::Context.new 160 | 161 | provider = context.provider(:aws, alias: 'west') 162 | 163 | expect(provider).to eq('aws.west') 164 | end 165 | 166 | it 'should append providers to an array' do 167 | context = Terrafying::Context.new 168 | 169 | context.provider(:aws, alias: 'west') 170 | 171 | providers = context.output_with_children['provider'] 172 | 173 | expect(providers).to include( 174 | a_hash_including('aws' => { alias: 'west' }) 175 | ) 176 | end 177 | 178 | it 'should append multiple providers to an array' do 179 | context = Terrafying::Context.new 180 | 181 | context.provider(:aws, alias: 'west') 182 | context.provider(:aws, alias: 'east') 183 | 184 | providers = context.output_with_children['provider'] 185 | 186 | expect(providers).to include( 187 | a_hash_including('aws' => { alias: 'west' }), 188 | a_hash_including('aws' => { alias: 'east' }) 189 | ) 190 | end 191 | 192 | it 'should not allow duplicate providers' do 193 | context = Terrafying::Context.new 194 | 195 | context.provider(:aws, alias: 'west') 196 | context.provider(:aws, alias: 'west') 197 | 198 | providers = context.output_with_children['provider'] 199 | 200 | expect(providers.size).to eq(1) 201 | end 202 | 203 | it 'should reject duplicate providers on name + alias' do 204 | context = 
Terrafying::Context.new 205 | 206 | context.provider(:aws, alias: 'west', region: 'eu-west-1') 207 | expect do 208 | context.provider(:aws, alias: 'west', region: 'eu-west-2') 209 | end.to raise_error(/aws\.west/) 210 | end 211 | 212 | it 'should merge nested contexts with default root providers' do 213 | root_context = Terrafying::RootContext.new 214 | nested_context = Terrafying::Context.new 215 | more_nested = Terrafying::Context.new 216 | 217 | more_nested.provider(:aws, alias: 'east', region: 'eu-east-1') 218 | nested_context.add! more_nested 219 | root_context.add! nested_context 220 | 221 | providers = root_context.output_with_children['provider'] 222 | 223 | expect(providers.size).to eq(2) 224 | expect(providers).to include( 225 | a_hash_including('aws' => { region: 'eu-west-1' }), 226 | a_hash_including('aws' => { alias: 'east', region: 'eu-east-1' }) 227 | ) 228 | end 229 | 230 | it 'should merge nested contexts with duplicate providers' do 231 | context = Terrafying::Context.new 232 | nested_context = Terrafying::Context.new 233 | 234 | context.provider(:aws, alias: 'west', region: 'eu-west-1') 235 | nested_context.provider(:aws, alias: 'west', region: 'eu-west-1') 236 | nested_context.provider(:aws, alias: 'east', region: 'eu-east-1') 237 | context.add! nested_context 238 | 239 | providers = context.output_with_children['provider'] 240 | 241 | expect(providers.size).to eq(2) 242 | expect(providers).to include( 243 | a_hash_including('aws' => { alias: 'west', region: 'eu-west-1' }), 244 | a_hash_including('aws' => { alias: 'east', region: 'eu-east-1' }) 245 | ) 246 | end 247 | 248 | it 'should merge nested contexts with providers' do 249 | context = Terrafying::Context.new 250 | nested_context = Terrafying::Context.new 251 | 252 | context.provider(:aws, alias: 'west', region: 'eu-west-1') 253 | nested_context.provider(:aws, alias: 'east', region: 'eu-east-1') 254 | context.add! 
nested_context 255 | 256 | providers = context.output_with_children['provider'] 257 | 258 | expect(providers.size).to eq(2) 259 | expect(providers).to include( 260 | a_hash_including('aws' => { alias: 'west', region: 'eu-west-1' }), 261 | a_hash_including('aws' => { alias: 'east', region: 'eu-east-1' }) 262 | ) 263 | end 264 | end 265 | 266 | it 'should reject duplicate resources' do 267 | context = Terrafying::Context.new 268 | 269 | context.resource(:aws_instance, 'wibble', {}) 270 | expect do 271 | context.resource(:aws_instance, 'wibble', {}) 272 | end.to raise_error(/aws_instance.wibble/) 273 | end 274 | 275 | it 'should reject duplicate data' do 276 | context = Terrafying::Context.new 277 | 278 | context.data(:aws_instance, 'wibble', {}) 279 | expect do 280 | context.data(:aws_instance, 'wibble', {}) 281 | end.to raise_error(/aws_instance.wibble/) 282 | end 283 | 284 | it 'should reject duplicate module' do 285 | context = Terrafying::Context.new 286 | 287 | context.tf_module(:wibble, {}) 288 | expect do 289 | context.tf_module(:wibble, {}) 290 | end.to raise_error(/wibble/) 291 | end 292 | 293 | context 'output_of' do 294 | it 'should use a ref' do 295 | context = Terrafying::Context.new 296 | 297 | ref = context.output_of(:aws_security_group, 'foo', 'bar').downcase 298 | 299 | expect(ref.to_s).to eq('${lower(aws_security_group.foo.bar)}') 300 | end 301 | end 302 | 303 | context 'id_of' do 304 | it 'should use a ref' do 305 | context = Terrafying::Context.new 306 | 307 | ref = context.id_of(:aws_security_group, 'foo').downcase 308 | 309 | expect(ref.to_s).to eq('${lower(aws_security_group.foo.id)}') 310 | end 311 | end 312 | 313 | it 'should bundle up some resources' do 314 | ctx = Terrafying::Context.bundle do 315 | resource :aws_wibble, 'bibble', {} 316 | end 317 | 318 | expect(ctx.output_with_children['resource']['aws_wibble'].count).to eq(1) 319 | end 320 | end 321 | 322 | RSpec.describe Terrafying::RootContext do 323 | context 'default providers' do 324 | it 'should let you override default providers' do 325 | context = Terrafying::RootContext.new 326 | 327 | context.provider('aws', region: 'wibble-1') 328 | 329 | providers = context.output_with_children['provider'] 330 | 331 | expect(providers).to include( 332 | a_hash_including('aws' => { region: 'wibble-1' }) 333 | ) 334 | end 335 | 336 | it 'should add default ones' do 337 | context = Terrafying::RootContext.new 338 | 339 | providers = context.output_with_children['provider'] 340 | 341 | expect(providers).to include( 342 | a_hash_including('aws' => { region: 'eu-west-1' }) 343 | ) 344 | end 345 | end 346 | end 347 | -------------------------------------------------------------------------------- /terrafying.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | lib = File.expand_path('lib', __dir__) 4 | $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) 5 | require 'terrafying/version' 6 | 7 | Gem::Specification.new do |spec| 8 | spec.name = 'terrafying' 9 | spec.version = Terrafying::VERSION 10 | spec.authors = ['uSwitch Limited'] 11 | spec.email = ['developers@uswitch.com'] 12 | spec.license = 'Apache-2.0' 13 | 14 | spec.summary = 'No.' 15 | spec.description = 'No.' 
16 | spec.homepage = 'https://github.com/uswitch/terrafying' 17 | 18 | spec.bindir = 'bin' 19 | spec.executables << 'terrafying' 20 | spec.files = Dir.glob('lib/**/*') 21 | spec.require_paths = ['lib'] 22 | 23 | spec.add_development_dependency 'bundler', '~> 2.4' 24 | spec.add_development_dependency 'pry' 25 | spec.add_development_dependency 'rake', '~> 10.0' 26 | spec.add_development_dependency 'rspec', '~> 3.7' 27 | spec.add_development_dependency 'rspec-mocks', '~> 3.7' 28 | 29 | spec.add_runtime_dependency 'aws-sdk-autoscaling', '~> 1' 30 | spec.add_runtime_dependency 'aws-sdk-core', '~> 3' 31 | spec.add_runtime_dependency 'aws-sdk-dynamodb', '~> 1' 32 | spec.add_runtime_dependency 'aws-sdk-ec2', '~> 1' 33 | spec.add_runtime_dependency 'aws-sdk-elasticloadbalancingv2', '~> 1' 34 | spec.add_runtime_dependency 'aws-sdk-kafka', '~> 1' 35 | spec.add_runtime_dependency 'aws-sdk-pricing', '~> 1.9.0' 36 | spec.add_runtime_dependency 'aws-sdk-route53', '~> 1' 37 | spec.add_runtime_dependency 'aws-sdk-s3', '~> 1' 38 | 39 | spec.add_runtime_dependency 'deep_merge', '~> 1.1.1' 40 | spec.add_runtime_dependency 'netaddr', '~> 1.5' 41 | spec.add_runtime_dependency 'thor', '~> 0.19.1' 42 | spec.add_runtime_dependency 'xxhash', '~> 0.4.0' 43 | end 44 | --------------------------------------------------------------------------------
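
For orientation, a minimal usage sketch of the generator API exercised by spec/terrafying/generator_spec.rb above. It is not part of the repository; the resource types, names and attributes (aws_security_group "web", aws_instance "app", the vpc_id variable) are illustrative assumptions only.

# Illustrative sketch only -- the names and attributes below are assumptions,
# not code from the terrafying repository.
require 'terrafying/generator'

ctx = Terrafying::Context.new
ctx.provider(:aws, alias: 'west', region: 'eu-west-1')

# var/id_of return reference objects; call #to_s to embed the "${...}" interpolation.
vpc_id = ctx.var(:vpc_id, type: 'string')
ctx.resource(:aws_security_group, 'web', { name: 'web', vpc_id: vpc_id.to_s })
ctx.resource(:aws_instance, 'app', {
  vpc_security_group_ids: [ctx.id_of(:aws_security_group, 'web').to_s]
})

puts ctx.pretty_generate   # Terraform-compatible JSON for the declarations above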