├── .gitignore
├── .travis.yml
├── Gemfile
├── LICENSE
├── README.md
├── bin
│   ├── casher
│   └── casher.rb
└── spec
    └── casher_spec.rb

/.gitignore:
--------------------------------------------------------------------------------
Gemfile.lock

--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
language: ruby

rvm: 2.5

script: bundle exec rspec spec
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
source 'https://rubygems.org'

gem 'rspec'

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
Copyright (c) 2013 Konstantin Haase

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# CA$H€R

## Purpose

Casher is a Ruby script used by [Travis Build](https://github.com/travis-ci/travis-build) to fetch, create, and update caches.

Casher checks each URL provided to it for an existing cache and stops at the first one it finds. It will `fetch` that cache and expand it at the root of the filesystem.

If a cache is not found at any of the URLs, Casher creates one. Casher will `add` the directories specified to be cached and then `push` the cache to a URL.

Casher checks for changes in existing caches using `md5deep`, which can recursively compute the MD5 checksum of every file in a directory. If a difference in the MD5 checksums is found, Casher will pack a new archive and `push` it to a URL.


## Interaction With The System

[Travis Build](https://github.com/travis-ci/travis-build) generates a list of URLs for caches and provides them to Casher. If caching shortcuts are used in a user's Travis configuration, Travis Build converts them to directory information, as Casher is only concerned with which directories to archive.

Travis Build can also run `before_cache`, if it is specified in a user's .travis.yml, to remove files that could change the checksums and cause Casher to invalidate the cache.
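For illustration, a hypothetical `.travis.yml` that caches a directory and removes a volatile file in `before_cache` might look like this (the directory and the file removed are placeholders, not part of this repository):

```yaml
cache:
  directories:
    - vendor/bundle
before_cache:
  - rm -f vendor/bundle/install.log
```

Travis Build then drives Casher's three subcommands roughly as in the sketch below (the URL is a placeholder; real cache URLs are signed and generated by Travis Build):

```sh
casher fetch "https://cache.example.com/repo/cache.tgz"
casher add "$HOME/vendor/bundle"
# ... the build runs and may change the cached contents ...
casher push "https://cache.example.com/repo/cache.tgz"
```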
## Status

Casher is actively used by both Travis for private repositories and Travis for open source. It is maintained by @BanzaiMan and falls under Team Sapphire's domain.

## License & copyright information

See LICENSE file.

Copyright (c) 2011-2016 [Travis CI development
team](https://github.com/travis-ci).

![Comic about cash](http://4.bp.blogspot.com/_XdP6Lp2ceqY/TEJuww2sk2I/AAAAAAAAWpY/EmWweRXahGM/s1600/tumblr_l55qcmER041qznd83o1_500.jpg)
--------------------------------------------------------------------------------
/bin/casher:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# VAR=<<EOF does not capture a heredoc in bash; use command substitution.
# curl's -w format wants single %{...} here (the Ruby version doubles the
# % only because it builds its command string with String#%).
CURL_FORMAT=$(cat <<EOF
time_namelookup: %{time_namelookup} s
time_connect: %{time_connect} s
time_appconnect: %{time_appconnect} s
time_pretransfer: %{time_pretransfer} s
time_redirect: %{time_redirect} s
time_starttransfer: %{time_starttransfer} s
speed_download: %{speed_download} bytes/s
----------
time_total: %{time_total} s
EOF
)

ANSI_RED="\033[31;1m"
ANSI_GREEN="\033[32;1m"
ANSI_YELLOW="\033[33;1m"
ANSI_RESET="\033[0m"
ANSI_CLEAR="\033[0K"


# TAR_DIR_NOT_FOUND_REGEXP = /(\w+): Not found in archive/

MD5DEEP_CHECK_LOG_LIMIT=1000

CASHER_DIR=${HOME}/.casher
PATHS_FILE=${CASHER_DIR}/paths
CHECKSUM_FILE_BEFORE=${CASHER_DIR}/md5sums_before
CHECKSUM_FILE_AFTER=${CASHER_DIR}/md5sums_after
FETCH_TAR=${CASHER_DIR}/fetch.tgz
PUSH_TAR=${CASHER_DIR}/push.tgz

DIFF_FILE=${CASHER_DIR}/checksum_diff

args=()

function process_flags {
  local name
  for arg in "$@"; do
    case $arg in
      --name )
        shift
        name=$1
        shift
        PATHS_FILE=${CASHER_DIR}/${name}.paths
        CHECKSUM_FILE_BEFORE=${CASHER_DIR}/${name}.md5sums_before
        CHECKSUM_FILE_AFTER=${CASHER_DIR}/${name}.md5sums_after
        FETCH_TAR=${CASHER_DIR}/${name}-fetch.tgz
        PUSH_TAR=${CASHER_DIR}/${name}-push.tgz
        ;;
      *)
        break
        ;;
    esac
  done

  # keep remaining arguments as-is; re-splitting via echo would break paths with spaces
  args=("$@")
}

function setup {
  install_md5deep
  mkdir -p "$CASHER_DIR"
}

function checksum_checker {
  if [[ -n $(command -v md5deep64) ]]; then
    echo "md5deep64"
  else
    echo "md5deep"
  fi
}

function msg {
  local text=$1
  local color=$2
  local marker

  case $color in
    green )
      marker=$ANSI_GREEN
      ;;
    red )
      marker=$ANSI_RED
      ;;
    yellow )
      marker=$ANSI_YELLOW
      ;;
    * )
      ;;
  esac

  printf "${marker}%s${ANSI_RESET}\n" "$text"
}

function run {
  local archive_type=$1
  case $archive_type in
    cache|workspace)
      shift
      ;;
    *)
      archive_type=cache
      ;;
  esac

  local subcommand=$1
  shift

  case $subcommand in
    fetch|add|push)
      $subcommand "$@"
      ;;
    * )
      echo "unknown command $subcommand"
      exit 1
      ;;
  esac
}
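# fetch: try each URL in turn and stop at the first archive that downloads
# successfully; if none succeeds, report that no cache was found.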
function fetch {
  msg "attempting to download ${archive_type} archive"

  for url in "$@"; do
    display_name=$(display_name $url)

    msg "fetching ${display_name}" green

    curl $url -o ${FETCH_TAR} -f -s --retry 3 >${CASHER_DIR}/fetch.log 2>${CASHER_DIR}/fetch.err.log
    if [[ $? -eq 0 ]]; then
      msg "found ${archive_type}" green
      return
    fi
  done

  msg "could not download ${archive_type}" red
}

function add {
  if [[ $# -eq 0 ]]; then
    msg "No directory specified to be added" yellow
    return
  fi

  paths=$(expand_path "$@")

  for path in ${paths}; do
    if [[ -L ${path} ]]; then
      msg "${path} is a symbolic link to $(readlink ${path}); not following" yellow
      continue
    fi

    msg "adding ${path} to ${archive_type}"
    if [[ ! -e "${path}" ]]; then
      msg "creating directory ${path}"
      mkdir -p ${path}
    fi

    echo "${path}" >> ${PATHS_FILE}
  done

  touch ${CHECKSUM_FILE_BEFORE}

  if [[ -e ${FETCH_TAR} ]]; then
    # expand the archive, while recording directories not found in the archive
    dir_not_found=$(tar xPf ${FETCH_TAR} ${paths} 2>&1 | grep 'Not found' | sed -e 's/tar: \(.*\): Not found.*$/\1/g')
    # tar reports one missing path per line, so split on newlines
    IFS=$'\n'
    for dir in ${dir_not_found}; do
      msg "${dir} is not yet cached" red
    done
    unset IFS
  fi

  $(checksum_checker) -o f -r ${paths} | sort >> ${CHECKSUM_FILE_BEFORE}
}

function push {
  local url
  url=$1

  changed_p
  if [[ $CHANGED_P != TRUE ]]; then
    msg "nothing changed" green
    return
  fi

  msg "changes detected, packing new archive" green

  GZIP=-3 tar --format=posix -Pczf ${PUSH_TAR} $(<${PATHS_FILE})

  msg "uploading $(display_name $url)" green
  curl -T ${PUSH_TAR} $url -f -v >${CASHER_DIR}/push.log 2>${CASHER_DIR}/push.err.log
  if [[ $? -eq 0 ]]; then
    msg "${archive_type} uploaded" green
  else
    msg "failed to upload ${archive_type}" red
    # TODO filter and dump logs
  fi
}

function install_md5deep {
  local os

  if [[ -z $(command -v md5deep) ]]; then
    msg "Installing md5deep"
    os=$(uname | tr 'A-Z' 'a-z')
    case ${os} in
      darwin )
        brew install md5deep
        ;;
      linux)
        # on apt, `md5deep` is a wrapper for `hashdeep`, which provides
        # /usr/bin/hashdeep
        if [[ -z $(command -v hashdeep) ]]; then
          sudo apt-get install md5deep
        fi
        # on Xenial only, the apt package does not contain `/usr/bin/md5deep`
        # (it does from Bionic onward), so we create a symlink to sidestep the issue
        if [[ ! -e /usr/bin/md5deep && -e /usr/bin/hashdeep ]]; then
          sudo ln -sf /usr/bin/hashdeep /usr/bin/md5deep
        fi
        ;;
      msys_nt* | mingw*)
        choco install hashdeep
        ;;
      freebsd)
        sudo pkg install -y md5deep
        ;;
    esac
  fi
}
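# changed_p: set CHANGED_P=TRUE when no fetched archive exists, or when the
# md5deep checksums recorded at `add` time differ from the current tree.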
function changed_p {
  local diff_file_size change_msg
  unset CHANGED_P

  if [[ ! -e ${FETCH_TAR} ]]; then
    CHANGED_P=TRUE
    return
  fi

  sort ${CHECKSUM_FILE_BEFORE} | uniq > ${CHECKSUM_FILE_BEFORE}.sorted
  $(checksum_checker) -o f -r $(<${PATHS_FILE}) | sort | uniq > ${CHECKSUM_FILE_AFTER}
  # keep only the file names from diff's "<"/">" lines (fields 3..NF)
  diff -B ${CHECKSUM_FILE_BEFORE}.sorted ${CHECKSUM_FILE_AFTER} | \
    awk '/^[<>]/ {for (i=3; i<=NF; i++) printf("%s%s", $(i), i<NF ? OFS : ORS)}' | sort | uniq > ${DIFF_FILE}

  if [[ -s ${DIFF_FILE} ]]; then # DIFF_FILE has nonzero file size
    change_msg="changes detected (content changed, file is created, or file is deleted):\n$(head -c ${MD5DEEP_CHECK_LOG_LIMIT} ${DIFF_FILE})\n"
    diff_file_size=$(wc -c ${DIFF_FILE} | awk '{print $1}')
    if [[ ${diff_file_size} -gt ${MD5DEEP_CHECK_LOG_LIMIT} ]]; then
      change_msg="${change_msg}..."
    fi
    msg "${change_msg}" green
    CHANGED_P=TRUE
  fi
}

function display_name {
  local url penultimate ultimate
  url=$1

  IFS=/
  for part in ${url%\?*}; do
    penultimate=$ultimate
    ultimate=$part
  done
  unset IFS
  echo "${penultimate}/${ultimate}"
}

function expand_path {
  ruby -e "puts ARGV.map{|x| File.expand_path(x)}.join(' ')" "$@"
}

function main {
  # $0 [opts] [cache|workspace] [globs]
  process_flags "$@"
  setup
  run "${args[@]}"
}

main "$@"
--------------------------------------------------------------------------------
/bin/casher.rb:
--------------------------------------------------------------------------------
#!/usr/bin/env ruby
require 'timeout'
require 'shellwords'
require 'fileutils'
require 'yaml'
require 'uri'
require 'open3'

class Casher
  include FileUtils

  # %% escapes to a literal % when the curl command string is built with String#%
  CURL_FORMAT = <<-EOF
    time_namelookup: %%{time_namelookup} s
    time_connect: %%{time_connect} s
    time_appconnect: %%{time_appconnect} s
    time_pretransfer: %%{time_pretransfer} s
    time_redirect: %%{time_redirect} s
    time_starttransfer: %%{time_starttransfer} s
    speed_download: %%{speed_download} bytes/s
    ----------
    time_total: %%{time_total} s
  EOF

  ANSI_RED    = "\033[31;1m"
  ANSI_GREEN  = "\033[32;1m"
  ANSI_YELLOW = "\033[33;1m"
  ANSI_RESET  = "\033[0m"
  ANSI_CLEAR  = "\033[0K"


  TAR_DIR_NOT_FOUND_REGEXP = /(\w+): Not found in archive/

  MD5DEEP_CHECK_LOG_LIMIT = 1000

  def initialize
    @casher_dir = ENV['CASHER_DIR'] || File.expand_path(".casher", ENV["HOME"])
    @mtime_file = File.expand_path('mtime.yml', @casher_dir)
    @checksum_file_before = File.expand_path('md5sums_before', @casher_dir)
    @checksum_file_after = File.expand_path('md5sums_after', @casher_dir)
    @fetch_tar = File.expand_path('fetch.tbz', @casher_dir)
    @push_tar = File.expand_path('push.tbz', @casher_dir)
    @paths_file = File.expand_path('paths', @casher_dir)
    @mtimes = File.exist?(@mtime_file) && File.size(@mtime_file) > 0 ? YAML.load_file(@mtime_file) : {}
    @timeout = Integer(ENV['CASHER_TIME_OUT'] || 3*60)

    @counter = 0

    mkdir_p @casher_dir
  end
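  # run: dispatch to fetch/add/push, aborting the command (and cleaning up a
  # partial fetch) if it takes longer than @timeout seconds.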
  def run(command, *arguments)
    raise "unknown command" unless %w[fetch add push].include? command
    Timeout.timeout(@timeout) { send(command, *arguments) }
  rescue Timeout::Error
    line = "casher #{command}"
    line += " #{Shellwords.join(arguments)}" if command == "add"
    cleanup_cache if command == "fetch"
    $stderr.puts "running `#{line}` took longer than #{@timeout} seconds and has been aborted.\nYou can extend the timeout with `cache.timeout`. See https://docs.travis-ci.com/user/caching/#Setting-the-timeout for details"
  end

  def fetch(*urls)
    msg "attempting to download cache archive"
    archive_found = false
    urls.each do |url|
      msg "fetching #{cache_archive_name(url)}"

      @fetch_tar = File.expand_path('fetch.tgz', @casher_dir) if path_ext(url) == 'tgz'

      if system "curl --tcp-nodelay -w '#{CURL_FORMAT}' %p -o %p -f -s --retry 3 >#{@casher_dir}/fetch.log 2>#{@casher_dir}/fetch.err.log" % [url, @fetch_tar]
        msg "found cache"
        archive_found = true
        break
      end
    end
    cleanup_cache unless archive_found
  end

  def cleanup_cache
    msg "could not download cache", :red
    if File.exist? @fetch_tar
      rm @fetch_tar
    end
  end

  def add(*paths)
    expanded_paths = paths.map { |p| File.expand_path(p) }
    # skip symbolic links so they are neither recorded nor checksummed
    expanded_paths = expanded_paths.reject do |p|
      if File.symlink? p
        msg "#{p} is a symbolic link to #{File.readlink p}; not following", :yellow
        true
      else
        msg "adding #{p} to cache"
        unless File.exist?(p)
          msg "creating directory #{p}"
          mkdir_p p
        end
        false
      end
    end

    File.open(@paths_file, 'a') { |f| f << expanded_paths.join("\n") << "\n" }
    FileUtils.touch(@checksum_file_before)

    if fetched_archive
      output, errors = tar(:x, fetched_archive, *expanded_paths) do
        sleep 1
      end

      dirs_not_in_archive = errors.scan(TAR_DIR_NOT_FOUND_REGEXP).flatten.uniq

      dirs_not_in_archive.each do |dir|
        msg "#{dir} is not yet cached", :red
      end

      expanded_paths.each { |p| @mtimes[p] = Time.now.to_i }

      if md5deep_available?
        system "md5deep -o f -r #{expanded_paths.map { |p| Shellwords.escape(p) }.join(' ')} | sort >> #{@checksum_file_before}"
      else
        File.write(@mtime_file, @mtimes.to_yaml)
      end
    end
  end

  def push(url)
    cached_directories.each { |p| p = File.dirname(p) if File.file?(p); mkdir_p p }

    unless changed?
      msg "nothing changed, not updating cache"
      return
    end

    @push_tar = File.expand_path('push.tgz', @casher_dir) if path_ext(url) == 'tgz'

    msg "changes detected, packing new archive"

    tar(:c, @push_tar, *cached_directories) do
      @counter += 1
      puts "." if @counter % 5 == 0
      sleep 1
    end

    msg "uploading #{cache_archive_name(url)}"
    if system "curl -T %p %p -f -v >#{@casher_dir}/push.log 2>#{@casher_dir}/push.err.log" % [@push_tar, url]
      msg "cache uploaded"
    else
      msg "failed to upload cache", :red
      puts filter_http_params(File.read("#{@casher_dir}/push.err.log")), filter_http_params(File.read("#{@casher_dir}/push.log"))
    end
  end
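  # changed?: compare the md5deep checksums taken at `add` time with the
  # current tree; fall back to mtime comparison when md5deep is unavailable.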
  def changed?
    return true unless fetched_archive

    if md5deep_available?
      paths = File.read(@paths_file).split("\n")
      diff_file = File.expand_path('checksum_diff', @casher_dir)
      system("sort #{@checksum_file_before} | uniq > #{@checksum_file_before}.sorted")
      # keep only the file names from diff's "<"/">" lines (fields 3..NF)
      system(<<-SCRIPT)
        md5deep -o f -r #{paths.map { |p| Shellwords.escape(p) }.join(" ")} | sort | uniq > #{@checksum_file_after};
        diff -B #{@checksum_file_before}.sorted #{@checksum_file_after} | \
          awk '/^[<>]/ {for (i=3; i<=NF; i++) printf(\"%s%s\", $(i), i<NF ? OFS : ORS)}' | sort | uniq > #{diff_file}
      SCRIPT
      result = (File.size(@checksum_file_before) == 0 && File.size(@checksum_file_after) > 0) || File.size(diff_file) > 0

      if File.size(diff_file) > 0
        first_bytes = File.read(diff_file, MD5DEEP_CHECK_LOG_LIMIT + 1)
        msg "change detected (content changed, file is created, or file is deleted):\n#{first_bytes}\n"
        msg "...\n" if first_bytes.size > MD5DEEP_CHECK_LOG_LIMIT
      end

      return result
    end

    @mtimes.any? do |path, mtime|
      Dir.glob("#{path}/**/*").any? do |file|
        next if File.mtime(file).to_i <= mtime
        next if File.directory?(file)
        msg "change detected: #{file}"
        true
      end
    end
  end

  def tar(flag, file, *args, &block)
    compression_flag = file.end_with?('.tbz') ? 'j' : 'z'

    cmd = "tar -P#{compression_flag}#{flag}f #{Shellwords.escape(file)} #{Shellwords.join(args)}"
    # run tar with sudo off macOS (sw_vers only exists there) when sudo is available
    if ! system('sw_vers', [:out, :err] => '/dev/null') and system "sudo -v"
      cmd = "sudo " + cmd
    end
    stdin, stdout, stderr, wait_thr = Open3.popen3(cmd)
    while wait_thr.status do
      yield
    end

    errors = stderr.read
    output = stdout.read
    File.write(File.join(@casher_dir, 'tar.log'), output)
    File.write(File.join(@casher_dir, 'tar.err.log'), errors)
    status = wait_thr.value

    if !status.success? && flag.to_s != 'x'
      msg "FAILED: #{cmd}", :red
      puts errors, output
    end

    [output, errors]
  end

  def cache_archive_name(url)
    # last two path segments of the URL, excluding any query string
    %r(([^/]+?\/[^/]+?)(\?.*)?$).match(url)[1]
  end

  def path_ext(url)
    path = URI.split(url)[5]
    path.split('.').last
  end

  def fetched_archive
    [ File.expand_path('fetch.tbz', @casher_dir), File.expand_path('fetch.tgz', @casher_dir) ].find do |f|
      File.exist? f
    end
  end
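  # md5deep_available?: true when md5deep is on the PATH; otherwise attempt a
  # best-effort install (macOS only) and report whether that succeeded.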
  def md5deep_available?
    @md5deep_available ||= (system("which md5deep >/dev/null 2>&1") || install_md5deep)
  end

  def install_md5deep
    if ENV['TRAVIS_OS_NAME'] == 'osx'
      system('brew install md5deep')
    end
  end

  def cached_directories
    if File.exist?(@paths_file)
      File.read(@paths_file).split("\n")
    else
      @mtimes.keys
    end
  end

  def msg(text, color = :green)
    marker = case color
             when :green
               ANSI_GREEN
             when :red
               ANSI_RED
             when :yellow
               ANSI_YELLOW
             end

    puts "#{marker}#{text}#{ANSI_RESET}"
  end

  def filter_http_params(text)
    text.gsub(/\b(http[^\?]+)\?\S*/, '\1')
  end
end

Casher.new.run(*ARGV) if $0 == __FILE__
--------------------------------------------------------------------------------
/spec/casher_spec.rb:
--------------------------------------------------------------------------------
load File.join(File.dirname(__FILE__), '..', 'bin', 'casher.rb')

describe Casher do
  let(:tbz_url) { 'https://example.com/afdfad/master/cache--rvm-default--gemfile-Gemfile.tbz?param1=value1&param2=value2' }
  let(:tgz_url) { 'https://example.com/afdfad/master/cache--rvm-default--gemfile-Gemfile.tgz?param1=value1&param2=value2' }
  subject { described_class.new }

  describe '#run' do
    it 'calls "curl" to download archives' do
      expect(subject).to receive(:system).with(/curl\b.*#{tgz_url.gsub('?','\?')}/m).and_return(true)
      expect(subject).not_to receive(:system).with(/curl\b.*#{tbz_url.gsub('?','\?')}/m)
      subject.run('fetch', tgz_url, tbz_url)
    end
  end

  context 'when the first archive is not available' do
    before :each do
      expect(subject).to receive(:system).with(/curl\b.*#{tgz_url.gsub('?','\?')}/m).and_return(false)
    end

    it 'falls back to the next archive' do
      expect(subject).to receive(:system).with(/curl\b.*#{tbz_url.gsub('?','\?')}/m)
      subject.run('fetch', tgz_url, tbz_url)
    end
  end

  context 'when the curl call times out' do
    before :each do
      expect(subject).to receive(:system).with(/curl\b.*#{tgz_url.gsub('?','\?')}/m).and_raise(Timeout::Error)
    end

    it 'cleans up the fetch_tar' do
      expect(subject).to receive(:cleanup_cache)
      subject.run('fetch', tgz_url)
    end
  end
end
--------------------------------------------------------------------------------