├── .gitignore ├── LICENSE ├── README.md ├── build.sh └── main.go /.gitignore: -------------------------------------------------------------------------------- 1 | dist/* 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 Say Media Inc 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # terraform-s3-dir 2 | 3 | ``terraform-s3-dir`` is a small utility that takes a directory of files and produces a configuration file for [Terraform](https://terraform.io/) that will upload those files into a particular named S3 bucket. 
4 | 5 | It could be useful for using Terraform to deploy a static website to S3's website publishing feature. 6 | 7 | This utility just generates the ``aws_s3_bucket_object`` configurations. It's up to the user to separately create the bucket into which the objects will be placed. 8 | 9 | ## Installing 10 | 11 | Pretty standard Go program: 12 | 13 | * ``go get github.com/saymedia/terraform-s3-dir`` 14 | * ``go install github.com/saymedia/terraform-s3-dir`` 15 | 16 | ## Usage 17 | 18 | ``` 19 | Usage: terraform-s3-dir [-h] [-x glob patterns to exclude] <source-dir> <bucket-name> 20 | -h, --help 21 | -x, --exclude=glob patterns to exclude 22 | ``` 23 | 24 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | IFS=$' \n\t' 5 | VERSION="$(git describe)" 6 | 7 | go get github.com/mitchellh/gox 8 | 9 | gox -arch="amd64" -os="linux darwin" -output="dist/{{.OS}}/{{.Dir}}" .
10 | 11 | cd dist/linux 12 | tar -Jcvf ../terraform-s3-dir-"$VERSION"-linux.tar.xz ./* 13 | cd ../darwin 14 | tar -Jcvf ../terraform-s3-dir-"$VERSION"-darwin.tar.xz ./* 15 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "crypto/sha1" 5 | "encoding/json" 6 | "fmt" 7 | "io" 8 | "net/http" 9 | "os" 10 | "path/filepath" 11 | "strings" 12 | 13 | getopt "github.com/pborman/getopt" 14 | ) 15 | 16 | var exclude = getopt.ListLong("exclude", 'x', "", "glob patterns to exclude") 17 | var help = getopt.BoolLong("help", 'h', "", "print this help") 18 | 19 | func main() { 20 | getopt.SetParameters(" ") 21 | getopt.Parse() 22 | if *help { 23 | getopt.PrintUsage(os.Stdout) 24 | return 25 | } 26 | 27 | args := getopt.Args() 28 | if len(args) != 2 { 29 | getopt.PrintUsage(os.Stderr) 30 | os.Exit(1) 31 | } 32 | 33 | rootDir := args[0] 34 | bucketName := args[1] 35 | 36 | resourcesMap := map[string]interface{}{} 37 | result := map[string]interface{}{ 38 | "resource": map[string]interface{}{ 39 | "aws_s3_bucket_object": resourcesMap, 40 | }, 41 | } 42 | 43 | filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error { 44 | if err != nil { 45 | fmt.Fprintf(os.Stderr, "Error reading %s: %s\n", path, err) 46 | // Skip stuff we can't read. 47 | return nil 48 | } 49 | 50 | relPath, err := filepath.Rel(rootDir, path) 51 | if err != nil { 52 | fmt.Fprintf(os.Stderr, "Failed make %s relative: %s\n", path, err) 53 | return nil 54 | } 55 | 56 | path, err = filepath.EvalSymlinks(path) 57 | if err != nil { 58 | fmt.Fprintf(os.Stderr, "Failed to resolve symlink %s: %s\n", path, err) 59 | return nil 60 | } 61 | 62 | if info.IsDir() { 63 | // Don't need to create directories since they are implied 64 | // by the files within. 
65 | return nil 66 | } 67 | 68 | for _, pattern := range *exclude { 69 | var toMatch []string 70 | if strings.ContainsRune(pattern, filepath.Separator) { 71 | toMatch = append(toMatch, relPath) 72 | } else { 73 | // If the pattern does not include a path separator 74 | // then we apply it to all segments of the path 75 | // individually. 76 | toMatch = strings.Split(relPath, string(filepath.Separator)) 77 | } 78 | 79 | for _, matchPath := range toMatch { 80 | matched, _ := filepath.Match(pattern, matchPath) 81 | if matched { 82 | return nil 83 | } 84 | } 85 | } 86 | 87 | // We use the initial bytes of the file to infer a MIME type 88 | file, err := os.Open(path) 89 | if err != nil { 90 | fmt.Fprintf(os.Stderr, "Error opening %s: %s\n", path, err) 91 | return nil 92 | } 93 | hasher := sha1.New() 94 | fileBytes := make([]byte, 1024*1024) 95 | contentType := "" 96 | _, err = file.Read(fileBytes) 97 | // If we got back and error and it isn't the end of file then 98 | // skip it. This does "something" with 0 length files. It is 99 | // likely we should really be categorizing those based on file 100 | // extension. 101 | if err != nil && err != io.EOF { 102 | fmt.Fprintf(os.Stderr, "Error reading %s: %s\n", path, err) 103 | return nil 104 | } 105 | if strings.HasSuffix(relPath, ".svg") { 106 | // If we start to need a set of overrides for DetectContentType 107 | // then we need to find a different way to do this. 108 | contentType = "image/svg+xml" 109 | } else if strings.HasSuffix(relPath, ".css") { 110 | // If we start to need a set of overrides for DetectContentType 111 | // then we need to find a different way to do this. 112 | contentType = "text/css" 113 | } else { 114 | contentType = http.DetectContentType(fileBytes) 115 | } 116 | 117 | // Resource name is a hash of the path, so it should stay consistent 118 | // for a given file path as long as the relative path to the target 119 | // directory is always the same across runs. 
120 | hasher.Write([]byte(relPath)) 121 | resourceName := fmt.Sprintf("%x", hasher.Sum(nil)) 122 | 123 | resourcesMap[resourceName] = map[string]interface{}{ 124 | "bucket": bucketName, 125 | "key": relPath, 126 | "source": path, 127 | "etag": fmt.Sprintf("${md5(file(%q))}", path), 128 | "content_type": contentType, 129 | } 130 | 131 | return nil 132 | }) 133 | 134 | encoder := json.NewEncoder(os.Stdout) 135 | encoder.Encode(result) 136 | } 137 | --------------------------------------------------------------------------------