├── LICENSE ├── README.md ├── src.sh └── src ├── curl.sh ├── date.sh ├── expect.sh ├── file.sh ├── git.sh ├── hash.sh ├── line.sh ├── log.sh ├── package.sh ├── platform.sh ├── s3.sh ├── sort.sh └── tar.sh /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2014-2015, Mietek Bak 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ------------------------------------------------------------------------------- 2 | 3 | This project is no longer maintained. 4 | 5 | ------------------------------------------------------------------------------- 6 | 7 | 8 | [_bashmenot_](https://bashmenot.mietek.io/) 9 | =========================================== 10 | 11 | _bashmenot_ is a library of [GNU _bash_](https://gnu.org/software/bash/) functions, used by [Halcyon](https://halcyon.sh/) and [Haskell on Heroku](https://haskellonheroku.com/). 12 | 13 | See the [_bashmenot_ website](https://bashmenot.mietek.io/) for more information. 14 | 15 | 16 | Usage 17 | ----- 18 | 19 | ``` 20 | $ source bashmenot/src.sh 21 | ``` 22 | 23 | 24 | ### Installation 25 | 26 | _bashmenot_ can be installed by cloning the [_bashmenot_ source repository](https://github.com/mietek/bashmenot): 27 | 28 | ``` 29 | $ git clone https://github.com/mietek/bashmenot 30 | ``` 31 | 32 | 33 | ### Documentation 34 | 35 | - See the [_bashmenot_ reference](https://bashmenot.mietek.io/reference/) for a complete list of available functions and options. 36 | 37 | - Read the [_bashmenot_ source code](https://github.com/mietek/bashmenot) to understand how it works. 38 | 39 | 40 | About 41 | ----- 42 | 43 | Made by [Miëtek Bak](https://mietek.io/). Published under the BSD license. 
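
Example
-------

A minimal script using _bashmenot_ might look like this (the URL and paths below are placeholders):

```
#!/usr/bin/env bash

source bashmenot/src.sh

log 'Downloading example archive'
curl_download 'https://example.com/archive.tar.gz' '/tmp/example/archive.tar.gz'
extract_archive_into '/tmp/example/archive.tar.gz' '/tmp/example'
```

Here, `log` comes from `src/log.sh`, `curl_download` from `src/curl.sh`, and `extract_archive_into` from `src/tar.sh`.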
44 | -------------------------------------------------------------------------------- /src.sh: -------------------------------------------------------------------------------- 1 | unset POSIXLY_CORRECT 2 | 3 | set -o pipefail 4 | 5 | export BASHMENOT_DIR 6 | BASHMENOT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P ) 7 | 8 | source "${BASHMENOT_DIR}/src/date.sh" 9 | source "${BASHMENOT_DIR}/src/sort.sh" 10 | source "${BASHMENOT_DIR}/src/log.sh" 11 | source "${BASHMENOT_DIR}/src/expect.sh" 12 | source "${BASHMENOT_DIR}/src/platform.sh" 13 | source "${BASHMENOT_DIR}/src/line.sh" 14 | source "${BASHMENOT_DIR}/src/file.sh" 15 | source "${BASHMENOT_DIR}/src/package.sh" 16 | source "${BASHMENOT_DIR}/src/hash.sh" 17 | source "${BASHMENOT_DIR}/src/tar.sh" 18 | source "${BASHMENOT_DIR}/src/git.sh" 19 | source "${BASHMENOT_DIR}/src/curl.sh" 20 | source "${BASHMENOT_DIR}/src/s3.sh" 21 | 22 | 23 | bashmenot_self_update () { 24 | if (( ${BASHMENOT_NO_SELF_UPDATE:-0} )) || 25 | [[ ! -d "${BASHMENOT_DIR}/.git" ]] 26 | then 27 | return 0 28 | fi 29 | 30 | local now candidate_time 31 | now=$( get_current_time ) 32 | if candidate_time=$( get_modification_time "${BASHMENOT_DIR}" ) && 33 | (( candidate_time + 60 >= now )) 34 | then 35 | return 0 36 | fi 37 | 38 | local url 39 | url="${BASHMENOT_URL:-https://github.com/mietek/bashmenot}" 40 | 41 | log_begin 'Self-updating bashmenot...' 42 | 43 | local commit_hash 44 | if ! commit_hash=$( git_update_into "${url}" "${BASHMENOT_DIR}" ); then 45 | log_end 'error' 46 | return 0 47 | fi 48 | log_end "done, ${commit_hash}" 49 | 50 | touch "${BASHMENOT_DIR}" || return 1 51 | 52 | BASHMENOT_NO_SELF_UPDATE=1 \ 53 | source "${BASHMENOT_DIR}/src.sh" 54 | } 55 | 56 | 57 | if ! bashmenot_self_update; then 58 | log_error 'Failed to self-update bashmenot' 59 | exit 1 60 | fi 61 | -------------------------------------------------------------------------------- /src/curl.sh: -------------------------------------------------------------------------------- 1 | format_http_code_description () { 2 | local code 3 | expect_args code -- "$@" 4 | 5 | case "${code}" in 6 | '100') echo '100 (continue)';; 7 | '101') echo '101 (switching protocols)';; 8 | '200') echo 'done';; 9 | '201') echo '201 (created)';; 10 | '202') echo '202 (accepted)';; 11 | '203') echo '203 (non-authoritative information)';; 12 | '204') echo '204 (no content)';; 13 | '205') echo '205 (reset content)';; 14 | '206') echo '206 (partial content)';; 15 | '300') echo '300 (multiple choices)';; 16 | '301') echo '301 (moved permanently)';; 17 | '302') echo '302 (found)';; 18 | '303') echo '303 (see other)';; 19 | '304') echo '304 (not modified)';; 20 | '305') echo '305 (use proxy)';; 21 | '306') echo '306 (switch proxy)';; 22 | '307') echo '307 (temporary redirect)';; 23 | '400') echo '400 (bad request)';; 24 | '401') echo '401 (unauthorized)';; 25 | '402') echo '402 (payment required)';; 26 | '403') echo '403 (forbidden)';; 27 | '404') echo '404 (not found)';; 28 | '405') echo '405 (method not allowed)';; 29 | '406') echo '406 (not acceptable)';; 30 | '407') echo '407 (proxy authentication required)';; 31 | '408') echo '408 (request timeout)';; 32 | '409') echo '409 (conflict)';; 33 | '410') echo '410 (gone)';; 34 | '411') echo '411 (length required)';; 35 | '412') echo '412 (precondition failed)';; 36 | '413') echo '413 (request entity too large)';; 37 | '414') echo '414 (request URI too long)';; 38 | '415') echo '415 (unsupported media type)';; 39 | '416') echo '416 (requested range)';; 40 | '417') echo '417 
(expectation failed)';; 41 | '418') echo "418 (I'm a teapot)";; 42 | '419') echo '419 (authentication timeout)';; 43 | '420') echo '420 (enhance your calm)';; 44 | '426') echo '426 (upgrade required)';; 45 | '428') echo '428 (precondition required)';; 46 | '429') echo '429 (too many requests)';; 47 | '431') echo '431 (request header fields too large)';; 48 | '451') echo '451 (unavailable for legal reasons)';; 49 | '500') echo '500 (internal server error)';; 50 | '501') echo '501 (not implemented)';; 51 | '502') echo '502 (bad gateway)';; 52 | '503') echo '503 (service unavailable)';; 53 | '504') echo '504 (gateway timeout)';; 54 | '505') echo '505 (HTTP version not supported)';; 55 | '506') echo '506 (variant also negotiates)';; 56 | '510') echo '510 (not extended)';; 57 | '511') echo '511 (network authentication required)';; 58 | *) echo "${code} (unknown)" 59 | esac 60 | } 61 | 62 | 63 | return_http_code_status () { 64 | local code 65 | expect_args code -- "$@" 66 | 67 | case "${code}" in 68 | '2'*) return 0;; 69 | '3'*) return 3;; 70 | '4'*) return 4;; 71 | '5'*) return 5;; 72 | *) return 1 73 | esac 74 | } 75 | 76 | 77 | curl_do () { 78 | local url 79 | expect_args url -- "$@" 80 | shift 81 | 82 | # NOTE: On Debian 6, curl considers HTTP 40* errors to be transient, 83 | # which makes using the --retry option impractical. Additionally, 84 | # in some circumstances, curl writes out 100 and fails instead 85 | # of automatically continuing. 86 | # http://curl.haxx.se/mail/lib-2011-03/0161.html 87 | local max_retries retries code 88 | max_retries="${BASHMENOT_CURL_RETRIES:-5}" 89 | retries="${max_retries}" 90 | code= 91 | while (( retries )); do 92 | code=$( 93 | curl "${url}" \ 94 | --fail \ 95 | --location \ 96 | --silent \ 97 | --show-error \ 98 | --write-out '%{http_code}' \ 99 | "$@" \ 100 | 2>'/dev/null' 101 | ) || true 102 | 103 | local code_description 104 | code_description=$( format_http_code_description "${code}" ) 105 | log_indent_end "${code_description}" 106 | 107 | if [[ "${code}" =~ '2'.* ]]; then 108 | break 109 | fi 110 | if [[ "${code}" =~ '4'.* ]] && ! (( ${BASHMENOT_INTERNAL_CURL_RETRY_ALL:-0} )); then 111 | break 112 | fi 113 | 114 | retries=$(( retries - 1 )) 115 | if (( retries )); then 116 | local retry delay 117 | retry=$(( max_retries - retries )) 118 | delay=$(( 2**retry )) 119 | 120 | log_indent_begin "Retrying in ${delay} seconds (${retry}/${max_retries})..." 121 | sleep "${delay}" || true 122 | fi 123 | done 124 | 125 | return_http_code_status "${code}" || return 126 | } 127 | 128 | 129 | curl_download () { 130 | local src_file_url dst_file 131 | expect_args src_file_url dst_file -- "$@" 132 | 133 | log_indent_begin "Downloading ${src_file_url}..." 134 | 135 | local dst_dir 136 | dst_dir=$( dirname "${dst_file}" ) || return 1 137 | 138 | mkdir -p "${dst_dir}" || return 1 139 | 140 | curl_do "${src_file_url}" \ 141 | --output "${dst_file}" || return 142 | } 143 | 144 | 145 | curl_check () { 146 | local src_url 147 | expect_args src_url -- "$@" 148 | 149 | log_indent_begin "Checking ${src_url}..." 150 | 151 | curl_do "${src_url}" \ 152 | --output '/dev/null' \ 153 | --head || return 154 | } 155 | 156 | 157 | curl_upload () { 158 | local src_file dst_file_url 159 | expect_args src_file dst_file_url -- "$@" 160 | 161 | expect_existing "${src_file}" || return 1 162 | 163 | log_indent_begin "Uploading ${dst_file_url}..." 
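	# NOTE: --upload-file makes curl issue an HTTP PUT request to the given URL.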
164 | 165 | curl_do "${dst_file_url}" \ 166 | --output '/dev/null' \ 167 | --upload-file "${src_file}" || return 168 | } 169 | 170 | 171 | curl_delete () { 172 | local dst_url 173 | expect_args dst_url -- "$@" 174 | 175 | log_indent_begin "Deleting ${dst_url}..." 176 | 177 | curl_do "${dst_url}" \ 178 | --output '/dev/null' \ 179 | --request DELETE || return 180 | } 181 | -------------------------------------------------------------------------------- /src/date.sh: -------------------------------------------------------------------------------- 1 | case $( uname -s ) in 2 | 'Linux'|'FreeBSD') 3 | get_date () { 4 | date -u "$@" || return 0 5 | } 6 | ;; 7 | *) 8 | get_date () { 9 | gdate -u "$@" || return 0 10 | } 11 | esac 12 | 13 | 14 | get_http_date () { 15 | get_date -R "$@" || return 0 16 | } 17 | 18 | 19 | get_current_time () { 20 | get_date '+%s' "$@" || return 0 21 | } 22 | -------------------------------------------------------------------------------- /src/expect.sh: -------------------------------------------------------------------------------- 1 | expect_args () { 2 | local -a expect_internal_specs_a 3 | local expect_internal_status 4 | expect_internal_status=1 5 | while (( $# )); do 6 | if [[ "$1" == -- ]]; then 7 | expect_internal_status=0 8 | shift 9 | break 10 | fi 11 | expect_internal_specs_a+=( "$1" ) 12 | shift 13 | done 14 | if (( expect_internal_status )); then 15 | die "${FUNCNAME[1]:--}: Expected specs, guard, and args:" 'arg1 .. argN -- "$@"' 16 | fi 17 | 18 | local expect_internal_spec 19 | for expect_internal_spec in "${expect_internal_specs_a[@]}"; do 20 | if ! (( $# )); then 21 | die "${FUNCNAME[1]:--}: Expected args: ${expect_internal_specs_a[*]:-}" 22 | fi 23 | eval "${expect_internal_spec}=\$1" 24 | shift 25 | done 26 | } 27 | 28 | 29 | expect_vars () { 30 | while (( $# )); do 31 | if [[ -z "${!1:+_}" ]]; then 32 | die "${FUNCNAME[1]:--}: Expected var: $1" 33 | fi 34 | shift 35 | done 36 | } 37 | 38 | 39 | expect_existing () { 40 | while (( $# )); do 41 | if [[ ! -e "$1" ]]; then 42 | log_error "${FUNCNAME[1]:--}: Expected existing $1" 43 | return 1 44 | fi 45 | shift 46 | done 47 | } 48 | 49 | 50 | expect_no_existing () { 51 | while (( $# )); do 52 | if [[ -e "$1" ]]; then 53 | log_error "${FUNCNAME[1]:--}: Unexpected existing $1" 54 | return 1 55 | fi 56 | shift 57 | done 58 | } 59 | -------------------------------------------------------------------------------- /src/file.sh: -------------------------------------------------------------------------------- 1 | get_tmp_file () { 2 | local base 3 | expect_args base -- "$@" 4 | 5 | local template 6 | if [[ -z "${BASHMENOT_INTERNAL_TMP:-}" ]]; then 7 | template="/tmp/${base}.XXXXXXXXXX" 8 | else 9 | template="${BASHMENOT_INTERNAL_TMP}/${base}.XXXXXXXXXX" 10 | fi 11 | 12 | local tmp_file 13 | if ! tmp_file=$( mktemp -u "${template}" ); then 14 | log_error 'Failed to create temporary file' 15 | return 1 16 | fi 17 | 18 | echo "${tmp_file}" 19 | } 20 | 21 | 22 | get_tmp_dir () { 23 | local base 24 | expect_args base -- "$@" 25 | 26 | local template 27 | if [[ -z "${BASHMENOT_INTERNAL_TMP:-}" ]]; then 28 | template="/tmp/${base}.XXXXXXXXXX" 29 | else 30 | template="${BASHMENOT_INTERNAL_TMP}/${base}.XXXXXXXXXX" 31 | fi 32 | 33 | local tmp_dir 34 | if ! 
tmp_dir=$( mktemp -du "${template}" ); then 35 | log_error 'Failed to create temporary directory' 36 | return 1 37 | fi 38 | 39 | echo "${tmp_dir}" 40 | } 41 | 42 | 43 | get_size () { 44 | local thing 45 | expect_args thing -- "$@" 46 | 47 | du -sh "${thing}" | 48 | awk '{ print $1 }' | 49 | sed 's/K$/KB/;s/M$/MB/;s/G$/GB/' || return 1 50 | } 51 | 52 | 53 | case $( uname -s ) in 54 | 'Linux') 55 | get_modification_time () { 56 | local thing 57 | expect_args thing -- "$@" 58 | 59 | stat -c "%Y" "${thing}" || return 1 60 | } 61 | ;; 62 | *) 63 | get_modification_time () { 64 | local thing 65 | expect_args thing -- "$@" 66 | 67 | stat -f "%m" "${thing}" || return 1 68 | } 69 | esac 70 | 71 | 72 | get_dir_path () { 73 | local dir 74 | expect_args dir -- "$@" 75 | 76 | expect_existing "${dir}" || return 1 77 | 78 | ( cd "${dir}" && pwd -P ) || return 1 79 | } 80 | 81 | 82 | get_dir_name () { 83 | local dir 84 | expect_args dir -- "$@" 85 | 86 | expect_existing "${dir}" || return 1 87 | 88 | local path 89 | path=$( get_dir_path "${dir}" ) || return 1 90 | 91 | basename "${path}" || return 1 92 | } 93 | 94 | 95 | # TODO: Use realpath instead of readlink. 96 | case $( uname -s ) in 97 | 'Linux') 98 | get_link_path () { 99 | local link 100 | expect_args link -- "$@" 101 | 102 | readlink -m "${link}" || return 1 103 | } 104 | ;; 105 | *) 106 | get_link_path () { 107 | local link 108 | expect_args link -- "$@" 109 | 110 | greadlink -m "${link}" || return 1 111 | } 112 | esac 113 | 114 | 115 | find_tree () { 116 | local dir 117 | expect_args dir -- "$@" 118 | shift 119 | 120 | if [[ ! -d "${dir}" ]]; then 121 | return 0 122 | fi 123 | 124 | ( cd "${dir}" && find '.' "$@" 2>'/dev/null' ) | 125 | sed 's:^\./::' || return 0 126 | } 127 | 128 | 129 | find_added () { 130 | local old_dir new_dir 131 | expect_args old_dir new_dir -- "$@" 132 | shift 2 133 | 134 | local new_file 135 | find "${new_dir}" "$@" -type f -print0 2>'/dev/null' | 136 | sort0_natural | 137 | while read -rd $'\0' new_file; do 138 | local path old_file 139 | path="${new_file##${new_dir}/}" 140 | old_file="${old_dir}/${path}" 141 | 142 | if [[ ! -f "${old_file}" ]]; then 143 | echo "${path}" 144 | fi 145 | done || return 0 146 | } 147 | 148 | 149 | find_changed () { 150 | local old_dir new_dir 151 | expect_args old_dir new_dir -- "$@" 152 | shift 2 153 | 154 | local new_file 155 | find "${new_dir}" "$@" -type f -print0 2>'/dev/null' | 156 | sort0_natural | 157 | while read -rd $'\0' new_file; do 158 | local path old_file 159 | path="${new_file##${new_dir}/}" 160 | old_file="${old_dir}/${path}" 161 | 162 | if [[ -f "${old_file}" ]] && ! 
cmp -s "${old_file}" "${new_file}"; then 163 | echo "${path}" 164 | fi 165 | done || return 0 166 | } 167 | 168 | 169 | find_not_changed () { 170 | local old_dir new_dir 171 | expect_args old_dir new_dir -- "$@" 172 | shift 2 173 | 174 | local new_file 175 | find "${new_dir}" "$@" -type f -print0 2>'/dev/null' | 176 | sort0_natural | 177 | while read -rd $'\0' new_file; do 178 | local path old_file 179 | path="${new_file##${new_dir}/}" 180 | old_file="${old_dir}/${path}" 181 | 182 | if [[ -f "${old_file}" ]] && cmp -s "${old_file}" "${new_file}"; then 183 | echo "${path}" 184 | fi 185 | done || return 0 186 | } 187 | 188 | 189 | find_removed () { 190 | local old_dir new_dir 191 | expect_args old_dir new_dir -- "$@" 192 | shift 2 193 | 194 | local old_file 195 | find "${old_dir}" "$@" -type f -print0 2>'/dev/null' | 196 | sort0_natural | 197 | while read -rd $'\0' old_file; do 198 | local path new_file 199 | path="${old_file##${old_dir}/}" 200 | new_file="${new_dir}/${path}" 201 | 202 | if [[ ! -f "${new_file}" ]]; then 203 | echo "${path}" 204 | fi 205 | done || return 0 206 | } 207 | 208 | 209 | compare_tree () { 210 | local old_dir new_dir 211 | expect_args old_dir new_dir -- "$@" 212 | shift 2 213 | 214 | ( 215 | find_added "${old_dir}" "${new_dir}" "$@" | sed 's/^/+ /' 216 | find_changed "${old_dir}" "${new_dir}" "$@" | sed 's/^/* /' 217 | find_not_changed "${old_dir}" "${new_dir}" "$@" | sed 's/^/= /' 218 | find_removed "${old_dir}" "${new_dir}" "$@" | sed 's/^/- /' 219 | ) | 220 | sort_do -k 2 || return 0 221 | } 222 | 223 | 224 | expand_glob () { 225 | local dir glob 226 | expect_args dir glob -- "$@" 227 | 228 | expect_existing "${dir}" || return 1 229 | 230 | # TODO: Use $'\0' as delimiter. 231 | 232 | ( 233 | local -a files_a 234 | cd "${dir}" && 235 | IFS=$'\n' && files_a=( ${glob} ) && 236 | echo "${files_a[*]}" 237 | ) || return 1 238 | } 239 | -------------------------------------------------------------------------------- /src/git.sh: -------------------------------------------------------------------------------- 1 | validate_git_url () { 2 | local url 3 | expect_args url -- "$@" 4 | 5 | case "${url}" in 6 | 'https://'*) return 0;; 7 | 'ssh://'*) return 0;; 8 | 'git@'*) return 0;; 9 | 'file://'*) return 0;; 10 | 'http://'*) return 0;; 11 | 'git://'*) return 0;; 12 | *) return 1 13 | esac 14 | } 15 | 16 | 17 | git_do () { 18 | local work_dir cmd 19 | expect_args work_dir cmd -- "$@" 20 | shift 2 21 | 22 | expect_existing "${work_dir}" || return 1 23 | 24 | ( 25 | cd "${work_dir}" && 26 | git "${cmd}" "$@" 27 | ) || return 1 28 | } 29 | 30 | 31 | quiet_git_do () { 32 | local work_dir cmd 33 | expect_args work_dir cmd -- "$@" 34 | shift 2 35 | 36 | expect_existing "${work_dir}" || return 1 37 | 38 | git_do "${work_dir}" "${cmd}" "$@" >'/dev/null' 2>&1 || return 1 39 | } 40 | 41 | 42 | hash_newest_git_commit () { 43 | local dir 44 | expect_args dir -- "$@" 45 | 46 | expect_existing "${dir}" || return 1 47 | 48 | local commit_hash 49 | if ! 
commit_hash=$( git_do "${dir}" log -n 1 --pretty='format:%h' 2>'/dev/null' ); then 50 | return 0 51 | fi 52 | 53 | echo "${commit_hash}" 54 | } 55 | 56 | 57 | git_clone_over () { 58 | local url dir 59 | expect_args url dir -- "$@" 60 | 61 | local work_dir base_url branch 62 | work_dir=$( dirname "${dir}" ) || return 1 63 | base_url="${url%#*}" 64 | branch="${url#*#}" 65 | if [[ "${branch}" == "${base_url}" ]]; then 66 | branch='master'; 67 | fi 68 | 69 | rm -rf "${dir}" || return 1 70 | mkdir -p "${work_dir}" || return 1 71 | quiet_git_do "${work_dir}" clone "${base_url}" "${dir}" || return 1 72 | 73 | local commit_hash 74 | commit_hash=$( hash_newest_git_commit "${dir}" ) || return 1 75 | if [[ -n "${commit_hash}" ]]; then 76 | quiet_git_do "${dir}" checkout "${branch}" || return 1 77 | quiet_git_do "${dir}" submodule update --init --recursive || return 1 78 | fi 79 | 80 | hash_newest_git_commit "${dir}" || return 1 81 | } 82 | 83 | 84 | git_update_into () { 85 | local url dir 86 | expect_args url dir -- "$@" 87 | 88 | expect_existing "${dir}" || return 1 89 | 90 | local base_url branch 91 | base_url="${url%#*}" 92 | branch="${url#*#}" 93 | if [[ "${branch}" == "${base_url}" ]]; then 94 | branch='master'; 95 | fi 96 | 97 | local old_url 98 | old_url=$( git_do "${dir}" config --get 'remote.origin.url' ) || return 1 99 | if [[ "${old_url}" != "${base_url}" ]]; then 100 | git_do "${dir}" remote set-url 'origin' "${base_url}" || return 1 101 | fi 102 | 103 | quiet_git_do "${dir}" fetch 'origin' || return 1 104 | quiet_git_do "${dir}" fetch --tags 'origin' || return 1 105 | quiet_git_do "${dir}" reset --hard "origin/${branch}" || return 1 106 | quiet_git_do "${dir}" submodule update --init --recursive || return 1 107 | 108 | hash_newest_git_commit "${dir}" || return 1 109 | } 110 | 111 | 112 | git_acquire () { 113 | local src_dir thing dst_dir 114 | expect_args src_dir thing dst_dir -- "$@" 115 | 116 | local name 117 | if validate_git_url "${thing}"; then 118 | name=$( basename "${thing%.git}" ) || return 1 119 | 120 | local commit_hash 121 | if [[ ! -d "${dst_dir}/${name}" ]]; then 122 | log_begin "Cloning ${thing}..." 123 | 124 | if ! commit_hash=$( git_clone_over "${thing}" "${dst_dir}/${name}" ); then 125 | log_end 'error' 126 | return 1 127 | fi 128 | else 129 | log_begin "Updating ${thing}..." 130 | 131 | if ! 
commit_hash=$( git_update_into "${thing}" "${dst_dir}/${name}" ); then 132 | log_end 'error' 133 | return 1 134 | fi 135 | fi 136 | log_end "done, ${commit_hash}" 137 | else 138 | name=$( get_dir_name "${src_dir}/${thing}" ) || return 1 139 | 140 | copy_dir_over "${src_dir}/${thing}" "${dst_dir}/${name}" || return 1 141 | fi 142 | 143 | echo "${name}" 144 | } 145 | 146 | 147 | git_acquire_all () { 148 | local src_dir things dst_dir 149 | expect_args src_dir things dst_dir -- "$@" 150 | 151 | if [[ -z "${things}" ]]; then 152 | return 0 153 | fi 154 | 155 | local -a names_a 156 | local thing 157 | names_a=() 158 | while read -r thing; do 159 | local name 160 | name=$( git_acquire "${src_dir}" "${thing}" "${dst_dir}" ) || return 1 161 | names_a+=( "${name}" ) 162 | done <<<"${things}" 163 | 164 | IFS=$'\n' && echo "${names_a[*]}" 165 | } 166 | -------------------------------------------------------------------------------- /src/hash.sh: -------------------------------------------------------------------------------- 1 | get_hash () { 2 | local input 3 | input=$( cat ) || true 4 | 5 | if [[ -z "${input}" ]]; then 6 | return 0 7 | fi 8 | 9 | openssl sha1 <<<"${input}" | 10 | sed 's/^.* //' || return 1 11 | } 12 | 13 | 14 | hash_tree () { 15 | local dir 16 | expect_args dir -- "$@" 17 | shift 18 | 19 | if [[ ! -d "${dir}" ]]; then 20 | return 0 21 | fi 22 | 23 | ( 24 | cd "${dir}" && 25 | find '.' "$@" -type f -exec openssl sha1 '{}' ';' 2>'/dev/null' 26 | ) | 27 | sort_natural | 28 | get_hash || return 1 29 | } 30 | -------------------------------------------------------------------------------- /src/line.sh: -------------------------------------------------------------------------------- 1 | filter_first () { 2 | head -n 1 || return 0 3 | } 4 | 5 | 6 | filter_not_first () { 7 | sed '1d' || return 0 8 | } 9 | 10 | 11 | filter_last () { 12 | tail -n 1 || return 0 13 | } 14 | 15 | 16 | filter_not_last () { 17 | sed '$d' || return 0 18 | } 19 | 20 | 21 | filter_matching () { 22 | local pattern 23 | expect_args pattern -- "$@" 24 | 25 | awk '/'"${pattern//\//\\/}"'/ { print }' || return 0 26 | } 27 | 28 | 29 | filter_not_matching () { 30 | local pattern 31 | expect_args pattern -- "$@" 32 | 33 | awk '!/'"${pattern//\//\\/}"'/ { print }' || return 0 34 | } 35 | 36 | 37 | match_at_most_one () { 38 | awk ' NR == 1 { line = $0 "\n" } 39 | NR == 2 { line = ""; exit 1 } 40 | END { printf line }' || return 1 41 | } 42 | 43 | 44 | match_at_least_one () { 45 | grep '.' 
|| return 1 46 | } 47 | 48 | 49 | match_exactly_one () { 50 | match_at_most_one | match_at_least_one || return 1 51 | } 52 | 53 | 54 | strip_trailing_newline () { 55 | awk 'NR > 1 { printf "\n" } { printf "%s", $0 }' || return 0 56 | } 57 | -------------------------------------------------------------------------------- /src/log.sh: -------------------------------------------------------------------------------- 1 | bashmenot_internal_get_timestamp () { 2 | if (( ${BASHMENOT_LOG_TIMESTAMP:-0} )); then 3 | if [[ -z "${BASHMENOT_TIMESTAMP_EPOCH:-}" ]]; then 4 | local now 5 | now=$( get_date '+%H:%M:%S' ) 6 | 7 | echo -e "\033[2m${now}\033[0m " 8 | else 9 | local now diff pad 10 | now=$( get_current_time ) 11 | diff=$(( now - BASHMENOT_TIMESTAMP_EPOCH )) 12 | pad=' ' 13 | 14 | echo -e "\033[2m${pad:0:$(( 10 - ${#diff} ))}${diff}\033[0m " 15 | fi 16 | fi 17 | } 18 | 19 | 20 | bashmenot_internal_get_empty_timestamp () { 21 | if (( ${BASHMENOT_LOG_TIMESTAMP:-0} )); then 22 | if [[ -z "${BASHMENOT_TIMESTAMP_EPOCH:-}" ]]; then 23 | echo ' ' 24 | else 25 | echo ' ' 26 | fi 27 | fi 28 | } 29 | 30 | 31 | prefix_log () { 32 | local now prefix 33 | now=$( bashmenot_internal_get_timestamp ) 34 | prefix="$1" 35 | shift 36 | 37 | echo "${now}${*:+${prefix}$*}" >&2 38 | } 39 | 40 | 41 | prefix_log_begin () { 42 | local now prefix 43 | now=$( bashmenot_internal_get_timestamp ) 44 | prefix="$1" 45 | shift 46 | 47 | printf -- "${now}${*:+${prefix}$* }" >&2 48 | } 49 | 50 | 51 | log () { 52 | prefix_log '-----> ' "$@" 53 | } 54 | 55 | 56 | log_begin () { 57 | prefix_log_begin '-----> ' "$@" 58 | } 59 | 60 | 61 | log_end () { 62 | echo "$@" >&2 63 | } 64 | 65 | 66 | log_indent () { 67 | prefix_log ' ' "$@" 68 | } 69 | 70 | 71 | log_indent_begin () { 72 | prefix_log_begin ' ' "$@" 73 | } 74 | 75 | 76 | log_indent_end () { 77 | echo "$@" >&2 78 | } 79 | 80 | 81 | log_label () { 82 | local label 83 | label="$1$( printf ' %.0s' {0..41} )" 84 | shift 85 | 86 | log "${label:0:41}" "$( echo -en '\033[1m' )$*$( echo -en '\033[0m' )" 87 | } 88 | 89 | 90 | log_indent_label () { 91 | local label 92 | label="$1$( printf ' %.0s' {0..41} )" 93 | shift 94 | 95 | log_indent "${label:0:41}" "$( echo -en '\033[1m' )$*$( echo -en '\033[0m' )" 96 | } 97 | 98 | 99 | log_debug () { 100 | prefix_log "$( echo -en '\033[1m' ) *** DEBUG: " "$*$( echo -en '\033[0m' )" 101 | } 102 | 103 | 104 | log_warning () { 105 | prefix_log "$( echo -en '\033[1m' ) *** WARNING: " "$*$( echo -en '\033[0m' )" 106 | } 107 | 108 | 109 | log_error () { 110 | prefix_log "$( echo -en '\033[1m' ) *** ERROR: " "$*$( echo -en '\033[0m' )" 111 | } 112 | 113 | 114 | case $( uname -s ) in 115 | 'Linux') 116 | quote () { 117 | local prefix 118 | prefix="$( bashmenot_internal_get_empty_timestamp ) " 119 | 120 | sed -u "s/^/${prefix}/" >&2 || return 0 121 | } 122 | ;; 123 | 'Darwin') 124 | quote () { 125 | local prefix 126 | prefix="$( bashmenot_internal_get_empty_timestamp ) " 127 | 128 | sed -l "s/^/${prefix}/" >&2 || return 0 129 | } 130 | ;; 131 | *) 132 | quote () { 133 | local prefix 134 | prefix="$( bashmenot_internal_get_empty_timestamp ) " 135 | 136 | sed "s/^/${prefix}/" >&2 || return 0 137 | } 138 | esac 139 | 140 | 141 | die () { 142 | if [[ -n "${*:+_}" ]]; then 143 | log_error "$@" 144 | fi 145 | 146 | exit 1 147 | } 148 | -------------------------------------------------------------------------------- /src/package.sh: -------------------------------------------------------------------------------- 1 | fix_broken_links () { 2 | local dst_dir 3 | 
expect_args dst_dir -- "$@" 4 | 5 | expect_existing "${dst_dir}" || return 1 6 | 7 | local link 8 | find_tree "${dst_dir}" -type l -print0 | 9 | sort0_natural | 10 | while read -rd $'\0' link; do 11 | local link_dir link_name src_original src_canonical src_name 12 | link_dir=$( dirname "${dst_dir}/${link}" ) || return 1 13 | link_name=$( basename "${link}" ) || return 1 14 | src_original=$( readlink "${dst_dir}/${link}" ) || return 1 15 | src_canonical=$( get_link_path "${dst_dir}/${link}" ) || return 1 16 | src_name=$( basename "${src_canonical}" ) || return 1 17 | 18 | if [[ ! -e "${src_canonical}" ]]; then 19 | rm -f "${dst_dir}/${link}" || return 1 20 | 21 | local target 22 | if target=$( find_tree "${link_dir}" -name "${src_name}" | match_exactly_one ); then 23 | log_indent "Fixing broken link: ${link_name} -> ${src_name} (${src_original})" 24 | ln -fs "${target}" "${dst_dir}/${link}" || return 1 25 | else 26 | log_warning "Broken link: ${dst_dir}/${link} -> ${src_original}" 27 | fi 28 | fi 29 | done || return 0 30 | } 31 | 32 | 33 | install_deb_package () { 34 | local package_file dst_dir 35 | expect_args package_file dst_dir -- "$@" 36 | 37 | expect_existing "${package_file}" || return 1 38 | 39 | local package_name src_dir 40 | package_name=$( basename "${package_file}" ) || return 1 41 | src_dir=$( get_tmp_dir 'deb' ) || return 1 42 | 43 | log "Installing OS package: ${package_name}" 44 | 45 | dpkg --extract "${package_file}" "${src_dir}" 2>&1 | quote || return 1 46 | 47 | copy_dir_into "${src_dir}" "${dst_dir}" || return 1 48 | rm -rf "${src_dir}" || return 1 49 | } 50 | 51 | 52 | install_rpm_package () { 53 | local package_file dst_dir 54 | expect_args package_file dst_dir -- "$@" 55 | 56 | expect_existing "${package_file}" || return 1 57 | 58 | local package_name src_dir 59 | package_name=$( basename "${package_file}" ) || return 1 60 | src_dir=$( get_tmp_dir 'rpm' ) || return 1 61 | 62 | log "Installing OS package: ${package_name}" 63 | 64 | mkdir -p "${src_dir}" || return 1 65 | ( 66 | cd "${src_dir}" && 67 | rpm2cpio "${package_file}" | cpio --extract --make-directories >'/dev/null' 2>&1 68 | ) || return 1 69 | 70 | copy_dir_into "${src_dir}" "${dst_dir}" || return 1 71 | rm -rf "${src_dir}" || return 1 72 | } 73 | 74 | 75 | install_debian_packages () { 76 | local names dst_dir 77 | expect_args names dst_dir -- "$@" 78 | 79 | if [[ -z "${names}" ]]; then 80 | return 0 81 | fi 82 | 83 | local apt_dir 84 | if [[ -z "${BASHMENOT_APT_DIR:-}" ]]; then 85 | apt_dir=$( get_tmp_dir 'apt' ) || return 1 86 | else 87 | apt_dir="${BASHMENOT_APT_DIR}" 88 | fi 89 | 90 | local -a opts_a 91 | opts_a+=( -o debug::nolocking='true' ) 92 | opts_a+=( -o dir::cache="${apt_dir}/cache" ) 93 | opts_a+=( -o dir::state="${apt_dir}/state" ) 94 | 95 | local must_update 96 | must_update=1 97 | if [[ -d "${apt_dir}" ]]; then 98 | local now candidate_time 99 | now=$( get_current_time ) 100 | if candidate_time=$( get_modification_time "${apt_dir}" ) && 101 | (( candidate_time + 3600 >= now )) 102 | then 103 | must_update=0 104 | fi 105 | else 106 | rm -rf "${apt_dir}" || return 1 107 | fi 108 | 109 | rm -rf "${apt_dir}/cache/archives" || return 1 110 | mkdir -p "${apt_dir}/cache/archives/partial" "${apt_dir}/state/lists/partial" || return 1 111 | 112 | if (( must_update )); then 113 | apt-get "${opts_a[@]}" update 2>&1 | quote || return 1 114 | 115 | touch "${apt_dir}" || return 1 116 | fi 117 | 118 | local name 119 | while read -r name; do 120 | mkdir -p "${apt_dir}/cache/archives/partial" || return 1 121 
| 122 | apt-get "${opts_a[@]}" install --download-only --reinstall --yes "${name}" 2>&1 | quote || return 1 123 | 124 | local file 125 | find_tree "${apt_dir}/cache/archives" -type f -name '*.deb' -print0 | 126 | sort0_natural | 127 | while read -rd $'\0' file; do 128 | install_deb_package "${apt_dir}/cache/archives/${file}" "${dst_dir}" || return 1 129 | done 130 | 131 | rm -rf "${apt_dir}/cache/archives" || return 1 132 | done <<<"${names}" 133 | 134 | if [[ -z "${BASHMENOT_APT_DIR:-}" ]]; then 135 | rm -rf "${apt_dir}" || return 1 136 | fi 137 | 138 | fix_broken_links "${dst_dir}" || return 1 139 | } 140 | 141 | 142 | install_redhat_packages () { 143 | local names dst_dir 144 | expect_args names dst_dir -- "$@" 145 | 146 | if [[ -z "${names}" ]]; then 147 | return 0 148 | fi 149 | 150 | local yum_dir 151 | yum_dir=$( get_tmp_dir 'yum' ) || return 1 152 | 153 | local -a opts_a 154 | opts_a+=( --assumeyes ) 155 | opts_a+=( --downloadonly ) 156 | opts_a+=( --downloaddir="${yum_dir}" ) 157 | 158 | # NOTE: In old versions of yum, the --downloadonly option is 159 | # provided by yum-plugin-downloadonly, which must be installed 160 | # manually, and which causes yum to always return failure. 161 | 162 | local platform no_status 163 | platform=$( detect_platform ) 164 | no_status=0 165 | if [[ "${platform}" =~ 'linux-centos-6'* ]]; then 166 | no_status=1 167 | fi 168 | 169 | local name 170 | while read -r name; do 171 | local status 172 | status=0 173 | 174 | if ! yum list installed "${name}" >'/dev/null' 2>&1; then 175 | if ! yum install "${opts_a[@]}" "${name}" 2>&1 | quote; then 176 | status=1 177 | fi 178 | elif ! yum reinstall "${opts_a[@]}" "${name}" 2>&1 | quote; then 179 | status=1 180 | fi 181 | if ! (( no_status )) && (( status )); then 182 | return 1 183 | fi 184 | 185 | local file 186 | find_tree "${yum_dir}" -type f -name '*.rpm' -print0 | 187 | sort0_natural | 188 | while read -rd $'\0' file; do 189 | install_rpm_package "${yum_dir}/${file}" "${dst_dir}" || return 1 190 | done 191 | 192 | rm -rf "${yum_dir}" || return 1 193 | done <<<"${names}" 194 | 195 | fix_broken_links "${dst_dir}" || return 1 196 | } 197 | 198 | 199 | install_platform_packages () { 200 | local specs dst_dir 201 | expect_args specs dst_dir -- "$@" 202 | 203 | if [[ -z "${specs}" ]]; then 204 | return 0 205 | fi 206 | 207 | local platform 208 | platform=$( detect_platform ) 209 | 210 | local -a names_a 211 | local spec 212 | while read -r spec; do 213 | local pattern name 214 | pattern="${spec%:*}" 215 | name="${spec#*:}" 216 | if [[ "${pattern}" == "${name}" || "${platform}" =~ ${pattern} ]]; then 217 | names_a+=( "${name}" ) 218 | fi 219 | done <<<"${specs}" 220 | if [[ -z "${names_a[@]:+_}" ]]; then 221 | return 0 222 | fi 223 | 224 | local names 225 | names=$( IFS=$'\n' && echo "${names_a[*]}" ) 226 | 227 | if is_debian_like "${platform}"; then 228 | install_debian_packages "${names}" "${dst_dir}" || return 1 229 | elif is_redhat_like "${platform}"; then 230 | install_redhat_packages "${names}" "${dst_dir}" || return 1 231 | else 232 | local description 233 | description=$( format_platform_description "${platform}" ) 234 | 235 | log_error "Unexpected platform: ${description}" 236 | return 1 237 | fi 238 | } 239 | -------------------------------------------------------------------------------- /src/platform.sh: -------------------------------------------------------------------------------- 1 | format_platform_description () { 2 | case "$1" in 3 | 'freebsd-10.0-x86_64') echo 'FreeBSD 10.0 (x86_64)';; 4 | 
'freebsd-10.1-x86_64') echo 'FreeBSD 10.1 (x86_64)';; 5 | 'linux-amzn-2014.09-x86_64') echo 'Amazon Linux 2014.09 (x86_64)';; 6 | 'linux-arch-x86_64') echo 'Arch Linux (x86_64)';; 7 | 'linux-centos-6-i386') echo 'CentOS 6 (i386)';; 8 | 'linux-centos-6-x86_64') echo 'CentOS 6 (x86_64)';; 9 | 'linux-centos-7-i386') echo 'CentOS 7 (i386)';; 10 | 'linux-centos-7-x86_64') echo 'CentOS 7 (x86_64)';; 11 | 'linux-debian-6-i386') echo 'Debian 6 (i386)';; 12 | 'linux-debian-6-x86_64') echo 'Debian 6 (x86_64)';; 13 | 'linux-debian-7-i386') echo 'Debian 7 (i386)';; 14 | 'linux-debian-7-x86_64') echo 'Debian 7 (x86_64)';; 15 | 'linux-debian-8-i386') echo 'Debian 8 (i386)';; 16 | 'linux-debian-8-x86_64') echo 'Debian 8 (x86_64)';; 17 | 'linux-exherbo-x86_64') echo 'Exherbo Linux (x86_64)';; 18 | 'linux-fedora-19-i386') echo 'Fedora 19 (i386)';; 19 | 'linux-fedora-19-x86_64') echo 'Fedora 19 (x86_64)';; 20 | 'linux-fedora-20-i386') echo 'Fedora 20 (i386)';; 21 | 'linux-fedora-20-x86_64') echo 'Fedora 20 (x86_64)';; 22 | 'linux-fedora-21-x86_64') echo 'Fedora 21 (x86_64)';; 23 | 'linux-gentoo-x86_64') echo 'Gentoo Linux (x86_64)';; 24 | 'linux-opensuse-13.2-x86_64') echo 'openSUSE 13.2 (x86_64)';; 25 | 'linux-rhel-6-i386') echo 'Red Hat Enterprise Linux 6 (i386)';; 26 | 'linux-rhel-6-x86_64') echo 'Red Hat Enterprise Linux 6 (x86_64)';; 27 | 'linux-rhel-7-x86_64') echo 'Red Hat Enterprise Linux 7 (x86_64)';; 28 | 'linux-slackware-14.1-x86_64') echo 'Slackware 14.1 (x86_64)';; 29 | 'linux-sles-11-i386') echo 'SUSE Linux Enterprise Server 11 (i386)';; 30 | 'linux-sles-11-x86_64') echo 'SUSE Linux Enterprise Server 11 (x86_64)';; 31 | 'linux-sles-12-x86_64') echo 'SUSE Linux Enterprise Server 12 (x86_64)';; 32 | 'linux-ubuntu-10.04-i386') echo 'Ubuntu 10.04 LTS (i386)';; 33 | 'linux-ubuntu-10.04-x86_64') echo 'Ubuntu 10.04 LTS (x86_64)';; 34 | 'linux-ubuntu-12.04-i386') echo 'Ubuntu 12.04 LTS (i386)';; 35 | 'linux-ubuntu-12.04-x86_64') echo 'Ubuntu 12.04 LTS (x86_64)';; 36 | 'linux-ubuntu-14.04-i386') echo 'Ubuntu 14.04 LTS (i386)';; 37 | 'linux-ubuntu-14.04-x86_64') echo 'Ubuntu 14.04 LTS (x86_64)';; 38 | 'linux-ubuntu-14.10-i386') echo 'Ubuntu 14.10 (i386)';; 39 | 'linux-ubuntu-14.10-x86_64') echo 'Ubuntu 14.10 (x86_64)';; 40 | 'linux-ubuntu-15.04-i386') echo 'Ubuntu 15.04 (i386)';; 41 | 'linux-ubuntu-15.04-x86_64') echo 'Ubuntu 15.04 (x86_64)';; 42 | 'osx-10.6-x86_64') echo 'OS X 10.6 (x86_64)';; 43 | 'osx-10.7-x86_64') echo 'OS X 10.7 (x86_64)';; 44 | 'osx-10.8-x86_64') echo 'OS X 10.8 (x86_64)';; 45 | 'osx-10.9-x86_64') echo 'OS X 10.9 (x86_64)';; 46 | 'osx-10.10-x86_64') echo 'OS X 10.10 (x86_64)';; 47 | *) echo 'unknown' 48 | esac 49 | } 50 | 51 | 52 | is_debian_like () { 53 | case "$1" in 54 | 'linux-debian-'*) return 0;; 55 | 'linux-ubuntu-'*) return 0;; 56 | *) return 1 57 | esac 58 | } 59 | 60 | 61 | is_redhat_like () { 62 | case "$1" in 63 | 'linux-amzn-'*) return 0;; 64 | 'linux-centos-'*) return 0;; 65 | 'linux-fedora-'*) return 0;; 66 | 'linux-rhel-'*) return 0;; 67 | *) return 1 68 | esac 69 | } 70 | 71 | 72 | detect_os () { 73 | local raw_os 74 | raw_os=$( uname -s ) || true 75 | 76 | case "${raw_os}" in 77 | 'FreeBSD') echo 'freebsd';; 78 | 'Linux') echo 'linux';; 79 | 'Darwin') echo 'osx';; 80 | *) echo 'unknown' 81 | esac 82 | } 83 | 84 | 85 | detect_arch () { 86 | local raw_arch 87 | raw_arch=$( uname -m | tr '[:upper:]' '[:lower:]' ) || true 88 | 89 | case "${raw_arch}" in 90 | 'amd64') echo 'x86_64';; 91 | 'i686') echo 'i386';; 92 | 'x64') echo 'x86_64';; 93 | 'x86-64') echo 'x86_64';; 
94 | 'x86_64') echo 'x86_64';; 95 | *) echo 'unknown' 96 | esac 97 | } 98 | 99 | 100 | bashmenot_internal_detect_linux_label () { 101 | local label raw_label 102 | label='' 103 | 104 | if [[ -f '/etc/os-release' ]]; then 105 | label=$( awk -F= '/^ID=/ { print $2 }' <'/etc/os-release' ) || true 106 | fi 107 | if [[ -z "${label}" && -f '/etc/lsb-release' ]]; then 108 | label=$( awk -F= '/^DISTRIB_ID=/ { print $2 }' <'/etc/lsb-release' ) || true 109 | fi 110 | if [[ -z "${label}" && -f '/etc/centos-release' ]]; then 111 | label='centos' 112 | fi 113 | if [[ -z "${label}" && -f '/etc/debian_version' ]]; then 114 | label='debian' 115 | fi 116 | if [[ -z "${label}" && -f '/etc/redhat-release' ]]; then 117 | raw_label=$( <'/etc/redhat-release' ) || true 118 | case "${raw_label}" in 119 | 'CentOS'*) 120 | label='centos';; 121 | 'Red Hat Enterprise Linux Server'*) 122 | label='rhel';; 123 | *) 124 | true 125 | esac 126 | fi 127 | if [[ -z "${label}" && -f '/etc/SuSE-release' ]]; then 128 | raw_label=$( <'/etc/SuSE-release' ) || true 129 | case "${raw_label}" in 130 | 'SUSE Linux Enterprise Server'*) 131 | label='sles';; 132 | *) 133 | true 134 | esac 135 | fi 136 | 137 | echo "${label}" 138 | } 139 | 140 | 141 | bashmenot_internal_detect_linux_version () { 142 | local version raw_version 143 | version='' 144 | 145 | if [[ -f '/etc/os-release' ]]; then 146 | version=$( awk -F= '/^VERSION_ID=/ { print $2 }' <'/etc/os-release' ) || true 147 | fi 148 | if [[ -z "${version}" && -f '/etc/lsb-release' ]]; then 149 | version=$( awk -F= '/^DISTRIB_RELEASE=/ { print $2 }' <'/etc/lsb-release' ) || true 150 | fi 151 | if [[ -z "${version}" && -f '/etc/centos-release' ]]; then 152 | raw_version=$( <'/etc/centos-release' ) || true 153 | case "${raw_version}" in 154 | 'CentOS release 6'*) 155 | version='6';; 156 | 'CentOS Linux release 7'*) 157 | version='7';; 158 | *) 159 | true 160 | esac 161 | fi 162 | if [[ -z "${version}" && -f '/etc/debian_version' ]]; then 163 | version=$( sed 's/^\([0-9]*\).*$/\1/' <'/etc/debian_version' ) || true 164 | fi 165 | if [[ -z "${version}" && -f '/etc/redhat-release' ]]; then 166 | raw_version=$( <'/etc/redhat-release' ) || true 167 | case "${raw_version}" in 168 | 'Red Hat Enterprise Linux Server release 5'*) 169 | version='5';; 170 | 'Red Hat Enterprise Linux Server release 6'*) 171 | version='6';; 172 | *) 173 | true 174 | esac 175 | fi 176 | if [[ -z "${version}" && -f '/etc/SuSE-release' ]]; then 177 | raw_version=$( <'/etc/SuSE-release' ) || true 178 | case "${raw_version}" in 179 | 'SUSE Linux Enterprise Server 11'*) 180 | version='11';; 181 | *) 182 | true 183 | esac 184 | fi 185 | 186 | echo "${version}" 187 | } 188 | 189 | 190 | detect_platform () { 191 | local os arch 192 | os=$( detect_os ) 193 | arch=$( detect_arch ) 194 | 195 | local raw_label raw_version 196 | raw_label='' 197 | raw_version='' 198 | case "${os}" in 199 | 'freebsd') 200 | raw_version=$( uname -r | awk -F- '{ print $1 }' ) || true 201 | ;; 202 | 'linux') 203 | raw_label=$( bashmenot_internal_detect_linux_label ) || true 204 | raw_version=$( bashmenot_internal_detect_linux_version ) || true 205 | ;; 206 | 'osx') 207 | raw_version=$( sw_vers -productVersion ) || true 208 | ;; 209 | *) 210 | true 211 | esac 212 | 213 | local label version 214 | label=$( tr -dc '[:alpha:]' <<<"${raw_label}" | tr '[:upper:]' '[:lower:]' ) || true 215 | version=$( tr -dc '[:digit:]\.' 
<<<"${raw_version}" | sed 's/^\([0-9]*\.[0-9]*\).*$/\1/' ) || true 216 | if [[ "${label}" == 'rhel' ]]; then 217 | version="${version%%.*}" 218 | fi 219 | 220 | echo "${os}${label:+-${label}}${version:+-${version}}${arch:+-${arch}}" 221 | } 222 | -------------------------------------------------------------------------------- /src/s3.sh: -------------------------------------------------------------------------------- 1 | format_s3_url () { 2 | local resource 3 | expect_args resource -- "$@" 4 | 5 | local endpoint 6 | endpoint="${BASHMENOT_S3_ENDPOINT:-s3.amazonaws.com}" 7 | 8 | echo "https://${endpoint}${resource}" 9 | } 10 | 11 | 12 | read_s3_listing_xml () { 13 | IFS='>' 14 | 15 | local element contents 16 | while read -rd '<' element contents; do 17 | if [[ "${element}" == 'Key' ]]; then 18 | echo "${contents}" 19 | fi 20 | done || return 0 21 | } 22 | 23 | 24 | s3_do () { 25 | local url 26 | expect_args url -- "$@" 27 | shift 28 | 29 | local endpoint date 30 | endpoint="${BASHMENOT_S3_ENDPOINT:-s3.amazonaws.com}" 31 | date=$( get_http_date ) 32 | 33 | if (( ${BASHMENOT_NO_S3_AUTH:-0} )); then 34 | curl_do "${url}" \ 35 | --header "Host: ${endpoint}" \ 36 | --header "Date: ${date}" \ 37 | "$@" || return 38 | return 0 39 | fi 40 | expect_vars BASHMENOT_AWS_ACCESS_KEY_ID BASHMENOT_AWS_SECRET_ACCESS_KEY 41 | 42 | local signature 43 | signature=$( 44 | sed "s/S3_DATE/${date}/" | 45 | strip_trailing_newline | 46 | openssl sha1 -hmac "${BASHMENOT_AWS_SECRET_ACCESS_KEY}" -binary | 47 | openssl base64 48 | ) || return 1 49 | 50 | local auth 51 | auth="AWS ${BASHMENOT_AWS_ACCESS_KEY_ID}:${signature}" 52 | 53 | curl_do "${url}" \ 54 | --header "Host: ${endpoint}" \ 55 | --header "Date: ${date}" \ 56 | --header "Authorization: ${auth}" \ 57 | "$@" || return 58 | } 59 | 60 | 61 | s3_download () { 62 | local src_bucket src_object dst_file 63 | expect_args src_bucket src_object dst_file -- "$@" 64 | 65 | local src_resource 66 | src_resource="/${src_bucket}/${src_object}" 67 | 68 | log_indent_begin "Downloading s3:/${src_resource}..." 69 | 70 | local src_url dst_dir 71 | src_url=$( format_s3_url "${src_resource}" ) 72 | dst_dir=$( dirname "${dst_file}" ) || return 1 73 | 74 | mkdir -p "${dst_dir}" || return 1 75 | 76 | s3_do "${src_url}" \ 77 | --output "${dst_file}" \ 78 | <<-EOF || return 79 | GET 80 | 81 | 82 | S3_DATE 83 | ${src_resource} 84 | EOF 85 | } 86 | 87 | 88 | s3_check () { 89 | local src_bucket src_object 90 | expect_args src_bucket src_object -- "$@" 91 | 92 | local src_resource 93 | src_resource="/${src_bucket}/${src_object}" 94 | 95 | log_indent_begin "Checking s3:/${src_resource}..." 96 | 97 | local src_url 98 | src_url=$( format_s3_url "${src_resource}" ) 99 | 100 | s3_do "${src_url}" \ 101 | --output '/dev/null' \ 102 | --head \ 103 | <<-EOF || return 104 | HEAD 105 | 106 | 107 | S3_DATE 108 | ${src_resource} 109 | EOF 110 | } 111 | 112 | 113 | s3_upload () { 114 | local src_file dst_bucket dst_object dst_acl 115 | expect_args src_file dst_bucket dst_object dst_acl -- "$@" 116 | 117 | expect_existing "${src_file}" || return 1 118 | 119 | local dst_resource 120 | dst_resource="/${dst_bucket}/${dst_object}" 121 | 122 | log_indent_begin "Uploading s3:/${dst_resource}..." 123 | 124 | local src_digest 125 | src_digest=$( 126 | openssl md5 -binary <"${src_file}" | 127 | openssl base64 128 | ) || return 1 129 | 130 | local dst_url 131 | dst_url=$( format_s3_url "${dst_resource}" ) 132 | 133 | # NOTE: In some circumstances, S3 uploads fail transiently 134 | # with 400. 
135 | BASHMENOT_INTERNAL_CURL_RETRY_ALL=1 \ 136 | s3_do "${dst_url}" \ 137 | --output '/dev/null' \ 138 | --header "Content-MD5: ${src_digest}" \ 139 | --header "x-amz-acl: ${dst_acl}" \ 140 | --upload-file "${src_file}" \ 141 | <<-EOF || return 142 | PUT 143 | ${src_digest} 144 | 145 | S3_DATE 146 | x-amz-acl:${dst_acl} 147 | ${dst_resource} 148 | EOF 149 | } 150 | 151 | 152 | s3_create () { 153 | local dst_bucket dst_acl 154 | expect_args dst_bucket dst_acl -- "$@" 155 | 156 | local dst_resource 157 | dst_resource="/${dst_bucket}/" 158 | 159 | log_indent_begin "Creating s3:/${dst_resource}..." 160 | 161 | local dst_url 162 | dst_url=$( format_s3_url "${dst_resource}" ) 163 | 164 | s3_do "${dst_url}" \ 165 | --output '/dev/null' \ 166 | --header "x-amz-acl: ${dst_acl}" \ 167 | --request PUT \ 168 | <<-EOF || return 169 | PUT 170 | 171 | 172 | S3_DATE 173 | x-amz-acl:${dst_acl} 174 | ${dst_resource} 175 | EOF 176 | } 177 | 178 | 179 | s3_copy () { 180 | local src_bucket src_object dst_bucket dst_object dst_acl 181 | expect_args src_bucket src_object dst_bucket dst_object dst_acl -- "$@" 182 | 183 | local src_resource dst_resource 184 | src_resource="/${src_bucket}/${src_object}" 185 | dst_resource="/${dst_bucket}/${dst_object}" 186 | 187 | log_indent_begin "Copying s3:/${src_resource} to s3:/${dst_resource}..." 188 | 189 | local dst_url 190 | dst_url=$( format_s3_url "${dst_resource}" ) 191 | 192 | s3_do "${dst_url}" \ 193 | --output '/dev/null' \ 194 | --header "x-amz-acl: ${dst_acl}" \ 195 | --header "x-amz-copy-source: ${src_resource}" \ 196 | --request PUT \ 197 | <<-EOF || return 198 | PUT 199 | 200 | 201 | S3_DATE 202 | x-amz-acl:${dst_acl} 203 | x-amz-copy-source:${src_resource} 204 | ${dst_resource} 205 | EOF 206 | } 207 | 208 | 209 | s3_delete () { 210 | local dst_bucket dst_object 211 | expect_args dst_bucket dst_object -- "$@" 212 | 213 | local dst_resource 214 | dst_resource="/${dst_bucket}/${dst_object}" 215 | 216 | log_indent_begin "Deleting s3:/${dst_resource}..." 217 | 218 | local dst_url 219 | dst_url=$( format_s3_url "${dst_resource}" ) 220 | 221 | s3_do "${dst_url}" \ 222 | --output '/dev/null' \ 223 | --request DELETE \ 224 | <<-EOF || return 225 | DELETE 226 | 227 | 228 | S3_DATE 229 | ${dst_resource} 230 | EOF 231 | } 232 | 233 | 234 | curl_list_s3 () { 235 | local url 236 | expect_args url -- "$@" 237 | 238 | log_indent_begin "Listing ${url}..." 239 | 240 | curl_do "${url}" \ 241 | --output >( read_s3_listing_xml ) || return 242 | } 243 | 244 | 245 | s3_list () { 246 | local src_bucket src_prefix 247 | expect_args src_bucket src_prefix -- "$@" 248 | 249 | local actual_bucket bucket_prefix actual_prefix 250 | actual_bucket="${src_bucket%%/*}" 251 | bucket_prefix="${src_bucket#*/}" 252 | if [[ "${bucket_prefix}" == "${src_bucket}" ]]; then 253 | bucket_prefix='' 254 | actual_prefix="${src_prefix}" 255 | else 256 | actual_prefix="${bucket_prefix}${src_prefix:+/${src_prefix}}" 257 | fi 258 | 259 | local bucket_resource src_resource 260 | bucket_resource="/${actual_bucket}/" 261 | src_resource="${bucket_resource}${actual_prefix:+?prefix=${actual_prefix}}" 262 | 263 | log_indent_begin "Listing s3:/${src_resource}..." 
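	# The heredoc passed to s3_do below is the string to sign for this request;
	# the sed filter strips the bucket prefix from each listed key.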
264 | 265 | local src_url 266 | src_url=$( format_s3_url "${src_resource}" ) 267 | 268 | s3_do "${src_url}" \ 269 | --output >( read_s3_listing_xml | sed "s:^${bucket_prefix}/::" ) \ 270 | <<-EOF || return 271 | GET 272 | 273 | 274 | S3_DATE 275 | ${bucket_resource} 276 | EOF 277 | } 278 | -------------------------------------------------------------------------------- /src/sort.sh: -------------------------------------------------------------------------------- 1 | case $( uname -s ) in 2 | 'Linux'|'FreeBSD') 3 | sort_do () { 4 | sort "$@" || return 0 5 | } 6 | ;; 7 | *) 8 | sort_do () { 9 | gsort "$@" || return 0 10 | } 11 | esac 12 | 13 | 14 | sort_natural () { 15 | sort_do -V "$@" || return 0 16 | } 17 | 18 | 19 | sort0_natural () { 20 | sort_do -Vz "$@" || return 0 21 | } 22 | -------------------------------------------------------------------------------- /src/tar.sh: -------------------------------------------------------------------------------- 1 | bashmenot_internal_tar_create () { 2 | local src_dir dst_file 3 | expect_args src_dir dst_file -- "$@" 4 | shift 2 5 | 6 | expect_existing "${src_dir}" || return 1 7 | 8 | local name format dst_dir 9 | name=$( basename "${dst_file}" ) || return 1 10 | format="${name##*.}" 11 | dst_dir=$( dirname "${dst_file}" ) || return 1 12 | 13 | mkdir -p "${dst_dir}" || return 1 14 | 15 | case "${format}" in 16 | 'tar') 17 | COPYFILE_DISABLE=1 \ 18 | tar -c -f "${dst_file}" -C "${src_dir}" "$@" '.' || return 1 19 | ;; 20 | 'gz') 21 | if which 'pigz' >'/dev/null' 2>&1; then 22 | COPYFILE_DISABLE=1 \ 23 | tar -c -C "${src_dir}" "$@" '.' | 24 | pigz -7 >"${dst_file}" || return 1 25 | else 26 | COPYFILE_DISABLE=1 \ 27 | tar -c -z -f "${dst_file}" -C "${src_dir}" "$@" '.' || return 1 28 | fi 29 | ;; 30 | 'bz2') 31 | if which 'pbzip2' >'/dev/null' 2>&1; then 32 | COPYFILE_DISABLE=1 \ 33 | tar -c -C "${src_dir}" "$@" '.' | 34 | pbzip2 -7 >"${dst_file}" || return 1 35 | else 36 | COPYFILE_DISABLE=1 \ 37 | tar -c -j -f "${dst_file}" -C "${src_dir}" "$@" '.' || return 1 38 | fi 39 | ;; 40 | 'xz') 41 | if which 'pxz' >'/dev/null' 2>&1; then 42 | COPYFILE_DISABLE=1 \ 43 | tar -c -C "${src_dir}" "$@" '.' | 44 | pxz -7 >"${dst_file}" || return 1 45 | else 46 | COPYFILE_DISABLE=1 \ 47 | tar -c -J -f "${dst_file}" -C "${src_dir}" "$@" '.' 
|| return 1 48 | fi 49 | ;; 50 | *) 51 | log_error "Unexpected archive format: ${name}" 52 | return 1 53 | esac 54 | } 55 | 56 | 57 | bashmenot_internal_tar_extract () { 58 | local src_file dst_dir 59 | expect_args src_file dst_dir -- "$@" 60 | shift 2 61 | 62 | expect_existing "${src_file}" || return 1 63 | 64 | local name format 65 | name=$( basename "${src_file}" ) || return 1 66 | format="${name##*.}" 67 | 68 | mkdir -p "${dst_dir}" || return 1 69 | 70 | case "${format}" in 71 | 'tar') 72 | COPYFILE_DISABLE=1 \ 73 | tar -xp -f "${src_file}" -C "${dst_dir}" "$@" || return 1 74 | ;; 75 | 'gz') 76 | if which 'pigz' >'/dev/null' 2>&1; then 77 | COPYFILE_DISABLE=1 \ 78 | pigz -d <"${src_file}" | 79 | tar -xp -C "${dst_dir}" "$@" || return 1 80 | else 81 | COPYFILE_DISABLE=1 \ 82 | tar -xp -z -f "${src_file}" -C "${dst_dir}" "$@" || return 1 83 | fi 84 | ;; 85 | 'bz2') 86 | if which 'pbzip2' >'/dev/null' 2>&1; then 87 | COPYFILE_DISABLE=1 \ 88 | pbzip2 -d <"${src_file}" | 89 | tar -xp -C "${dst_dir}" "$@" || return 1 90 | else 91 | COPYFILE_DISABLE=1 \ 92 | tar -xp -j -f "${src_file}" -C "${dst_dir}" "$@" || return 1 93 | fi 94 | ;; 95 | 'xz') 96 | if which 'pxz' >'/dev/null' 2>&1; then 97 | COPYFILE_DISABLE=1 \ 98 | pxz -d <"${src_file}" | 99 | tar -xp -C "${dst_dir}" "$@" || return 1 100 | else 101 | COPYFILE_DISABLE=1 \ 102 | tar -xp -J -f "${src_file}" -C "${dst_dir}" "$@" || return 1 103 | fi 104 | ;; 105 | *) 106 | log_error "Unexpected archive format: ${name}" 107 | return 1 108 | esac 109 | } 110 | 111 | 112 | copy_file () { 113 | local src_file dst_file 114 | expect_args src_file dst_file -- "$@" 115 | 116 | expect_existing "${src_file}" || return 1 117 | 118 | local dst_dir 119 | dst_dir=$( dirname "${dst_file}" ) || return 1 120 | 121 | mkdir -p "${dst_dir}" || return 1 122 | 123 | cp "${src_file}" "${dst_file}" 2>&1 | quote || return 1 124 | } 125 | 126 | 127 | copy_dir_entry_into () { 128 | local src_dir src_file dst_dir 129 | expect_args src_dir src_file dst_dir -- "$@" 130 | shift 3 131 | 132 | expect_existing "${src_dir}/${src_file}" || return 1 133 | 134 | mkdir -p "${dst_dir}" || return 1 135 | 136 | COPYFILE_DISABLE=1 \ 137 | tar -c -f - -C "${src_dir}" "$@" "${src_file}" | 138 | tar -xp -f - -C "${dst_dir}" 2>&1 | quote || return 1 139 | } 140 | 141 | 142 | copy_dir_glob_into () { 143 | local src_dir src_glob dst_dir 144 | expect_args src_dir src_glob dst_dir -- "$@" 145 | shift 3 146 | 147 | expect_existing "${src_dir}" || return 1 148 | 149 | # TODO: Use read -rd $'\0'. 150 | 151 | local glob_file 152 | expand_glob "${src_dir}" "${src_glob}" | 153 | while read -r glob_file; do 154 | copy_dir_entry_into "${src_dir}" "${glob_file}" "${dst_dir}" "$@" || return 1 155 | done || return 1 156 | } 157 | 158 | 159 | copy_dir_into () { 160 | local src_dir dst_dir 161 | expect_args src_dir dst_dir -- "$@" 162 | shift 2 163 | 164 | expect_existing "${src_dir}" || return 1 165 | 166 | mkdir -p "${dst_dir}" || return 1 167 | 168 | COPYFILE_DISABLE=1 \ 169 | tar -c -f - -C "${src_dir}" "$@" '.' 
| 170 | tar -xp -f - -C "${dst_dir}" 2>&1 | quote || return 1 171 | } 172 | 173 | 174 | copy_dir_over () { 175 | local src_dir dst_dir 176 | expect_args src_dir dst_dir -- "$@" 177 | shift 2 178 | 179 | expect_existing "${src_dir}" || return 1 180 | 181 | rm -rf "${dst_dir}" || return 1 182 | 183 | copy_dir_into "${src_dir}" "${dst_dir}" "$@" || return 1 184 | } 185 | 186 | 187 | create_archive () { 188 | local src_dir dst_file 189 | expect_args src_dir dst_file -- "$@" 190 | shift 2 191 | 192 | expect_existing "${src_dir}" || return 1 193 | 194 | local name stderr 195 | name=$( basename "${dst_file}" ) || return 1 196 | stderr=$( get_tmp_file 'tar.stderr' ) || return 1 197 | 198 | log_indent_begin "Creating ${name}..." 199 | 200 | if ! bashmenot_internal_tar_create "${src_dir}" "${dst_file}" "$@" 2>"${stderr}"; then 201 | log_indent_end 'error' 202 | 203 | quote <"${stderr}" 204 | return 1 205 | fi 206 | 207 | local size 208 | size=$( get_size "${dst_file}" ) || return 1 209 | log_indent_end "done, ${size}" 210 | 211 | rm -f "${stderr}" || true 212 | } 213 | 214 | 215 | extract_archive_into () { 216 | local src_file dst_dir 217 | expect_args src_file dst_dir -- "$@" 218 | shift 2 219 | 220 | expect_existing "${src_file}" || return 1 221 | 222 | local name stderr 223 | name=$( basename "${src_file}" ) || return 1 224 | stderr=$( get_tmp_file 'tar.stderr' ) || return 1 225 | 226 | log_indent_begin "Extracting ${name}..." 227 | 228 | if ! bashmenot_internal_tar_extract "${src_file}" "${dst_dir}" "$@" 2>"${stderr}"; then 229 | log_indent_end 'error' 230 | 231 | quote <"${stderr}" 232 | return 1 233 | fi 234 | 235 | local size 236 | size=$( get_size "${dst_dir}" ) || return 1 237 | log_indent_end "done, ${size}" 238 | 239 | rm -f "${stderr}" || true 240 | } 241 | 242 | 243 | extract_archive_over () { 244 | local src_file dst_dir 245 | expect_args src_file dst_dir -- "$@" 246 | shift 2 247 | 248 | expect_existing "${src_file}" || return 1 249 | 250 | rm -rf "${dst_dir}" || return 1 251 | 252 | extract_archive_into "${src_file}" "${dst_dir}" || return 1 253 | } 254 | 255 | 256 | case $( uname -s ) in 257 | 'Linux'|'FreeBSD') 258 | strip_tree () { 259 | local dir 260 | expect_args dir -- "$@" 261 | 262 | local file 263 | find "${dir}" "$@" -type f -print0 2>'/dev/null' | 264 | sort0_natural | 265 | while read -rd $'\0' file; do 266 | strip --strip-unneeded "${file}" 2>'/dev/null' | quote || true 267 | done || return 0 268 | } 269 | ;; 270 | 'Darwin') 271 | strip_tree () { 272 | local dir 273 | expect_args dir -- "$@" 274 | 275 | local file 276 | find "${dir}" "$@" -type f -print0 2>'/dev/null' | 277 | sort0_natural | 278 | while read -rd $'\0' file; do 279 | strip -u -r "${file}" 2>'/dev/null' | quote || true 280 | done || return 0 281 | } 282 | ;; 283 | *) 284 | strip_tree () { 285 | log_warning 'Cannot strip' 286 | } 287 | esac 288 | --------------------------------------------------------------------------------