├── .agignore
├── .bash_aliases
├── .bash_completion.d
│   ├── adb
│   ├── consul
│   ├── emulator
│   ├── git
│   ├── rake
│   ├── ssh-ca
│   ├── svn
│   ├── terraform
│   ├── tmux
│   ├── triage
│   └── vault
├── .bash_lib.d
│   ├── chdir_hook.bash
│   ├── colors.bash
│   ├── direnv.bash
│   ├── git-command-hooks.bash
│   ├── history.bash
│   ├── is_remote.bash
│   ├── toggle_remote.bash
│   ├── utility_functions.bash
│   └── window_commands.bash
├── .bash_prompt
├── .bash_prompt.d
│   ├── git-prompt.bash
│   ├── preferred_color.bash
│   ├── project-prompt.bash
│   └── remote-prompt.bash
├── .bashlib
├── .bashrc_macos
├── .bashrc_ubuntu
├── .config
│   ├── git
│   │   ├── attributes
│   │   └── ignore
│   └── starship.toml
├── .gitignore
├── .hammerspoon
│   ├── Spoons
│   │   ├── ReloadConfiguration.spoon
│   │   │   ├── docs.json
│   │   │   └── init.lua
│   │   └── SpoonInstall.spoon
│   │       ├── docs.json
│   │       └── init.lua
│   └── init.lua
├── .inputrc
├── .ipython
│   ├── ipython_config.py
│   └── ipython_helpers.py
├── .irbrc
├── .lessfilter
├── .pryrc
├── .psqlrc
├── .tmux.conf
├── .tmux_profile
├── .vim
│   ├── ftplugin
│   │   └── python.vim
│   └── vimrc
├── README.md
├── bin
│   ├── bash_historian
│   ├── cidr
│   ├── describe
│   ├── field
│   ├── fs_advisory_lock
│   ├── git-alias
│   ├── git-prank
│   ├── git-surgery
│   ├── graceful_kill
│   ├── groupby
│   ├── install_dispatching_hook
│   ├── jsonschema
│   ├── jsonschema2
│   ├── least
│   ├── lines
│   ├── organize
│   ├── ppsql
│   ├── rmate
│   ├── run_services
│   ├── shell_patterns
│   ├── sort_scanned_files
│   ├── triage
│   └── wifiqr
├── githooks
│   ├── dispatching_hook
│   ├── install_hooks
│   ├── post-command-pull.sync_repos
│   ├── post-receive.async_mirror
│   ├── post-receive.multiprimary
│   ├── post-receive.sync_mirror
│   ├── post-svn-dcommit
│   ├── post-svn-rebase
│   ├── pre-command-commit.refuse_shared_author
│   ├── pre-command-merge.non_tracking_branch
│   ├── pre-command-pull.sync_repos
│   ├── pre-command-pull.track_branch
│   ├── pre-command-push.auto_pull
│   ├── pre-push.refuse_WIP_commits
│   ├── pre-receive.multiprimary
│   ├── pre-svn-dcommit
│   └── pre-svn-rebase
├── install_bashfiles.bash
├── ssh
│   ├── list_authorized_keys.bash
│   ├── rotate_all_keys.bash
│   ├── rotate_ssh_key.bash
│   ├── setup_ssh_server.bash
│   ├── ssh-acme.bash
│   ├── ssh-auto.bash
│   ├── ssh-ca.bash
│   └── ssh-manager.bash
└── templates
    ├── .bash_features
    ├── .config
    │   ├── git
    │   │   └── config
    │   └── nano
    │       └── nanorc
    └── .ssh
        └── config
/.agignore:
--------------------------------------------------------------------------------
1 | .git/
2 |
--------------------------------------------------------------------------------
/.bash_aliases:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ## directory aliases ##
4 | #######################
5 |
6 | # directory traversal
7 | alias up='cd ..'
8 | alias cd..='cd ..'
9 |
10 | # directory listing
11 | alias ll='ls -AhlF'
12 | alias la='ls -AF'
13 | alias l.='ls -d .*'
14 | alias l='ls -CF'
15 |
16 | ## shell commands ##
17 | ####################
18 |
19 | # Taken from https://stackoverflow.com/a/18839557
20 | function rename_function() {
21 | test -n "$(declare -f "$1")" || return
22 | eval "${_/$1/$2}"
23 | unset -f "$1"
24 | }
25 |
26 | # https://askubuntu.com/questions/22037/aliases-not-available-when-using-sudo/22043#22043
27 | # expand aliases after sudo
28 | alias sudo='sudo '
29 |
30 | # useful alias function for logging in as someone else using sudo.
31 | # Especially useful for executing programs in the context of a service user (e.g. www-data)
32 | function loginas() {
33 | local user="$1"
34 | shift
35 | sudo -i -H -u "$user" "$@"
36 | }
37 |
38 | function ui_process() {
39 | if [ $# -eq 0 ]; then
40 | echo "USAGE: ui_process COMMAND [ARGS...]"
41 | return 1
42 | fi
43 | (eval "$@" 2>&1 &) >/dev/null
44 | }
45 |
46 | function faketty { script -qfc "$(printf "%q " "$@")"; }
47 |
48 | function exitcode() {
49 | local code="${1:-0}"
50 | return "$code"
51 | }
52 |
53 | # Launch explain shell website for a command
54 | function explain {
55 | # base url with first command already injected
56 | # $ explain tar
57 | # => http://explainshell.com/explain/tar?args=
58 | url="http://explainshell.com/explain/$1?args="
59 |
60 | # removes $1 (tar) from arguments ($@)
61 | shift;
62 |
63 | # iterates over remaining args and builds the rest of the url
64 | for i in "$@"; do
65 | url=$url"$i""+"
66 | done
67 |
68 | # opens url in browser
69 | open "$url"
70 | }
71 |
72 | muxmux () {
73 | current_user="${REMOTE_USER:-$USER}"
74 | tmux -L "${current_user}" new -A -s "${current_user}"
75 | }
76 | function sshmux() {
77 | current_user="${REMOTE_USER:-$USER}"
78 | # shellcheck disable=SC2029
79 | ssh -t "$@" REMOTE_USER="${current_user}" tmux -L "${current_user}" new -A -s "${current_user}"
80 | }
81 | function moshmux() {
82 | current_user="${REMOTE_USER:-$USER}"
83 | mosh "$@" -- tmux -L "${current_user}" new -A -s "${current_user}"
84 | }
85 | alias mux='tmuxinator'
86 | alias va='$VISUAL ~/.bash_aliases'
87 | alias sa='. ~/.bash_aliases'
88 | alias h\?='history | grep'
89 | function sync_history() {
90 | if [[ -f "$1" ]]; then
91 | local OLD_HISTFILE="$HISTFILE"
92 | HISTFILE="$1"
93 | history -r
94 | HISTFILE="$OLD_HISTFILE"
95 | fi
96 | history -a
97 | history -c
98 | history -r
99 | }
100 | alias frequent_history='history | cut -c30- | sort | uniq -c | sort -nr | head' # for finding common commands to ignore
101 | alias htmlmail='python -c '"'"'import cgi,sys; print("<pre>" + cgi.escape(sys.stdin.read()).encode("ascii","xmlcharrefreplace") + "</pre>")'"'"' | mail -E -a "Content-Type: text/html" '
102 | alias bashquote='python -c "import sys,pipes; print pipes.quote(sys.stdin.readline().strip())"'
103 |
104 | function cat() {
105 | # test if STDOUT is a tty, and preemptively truncate output
106 | if [[ -t 1 ]]; then
107 | local output="$(tail -n $LINES -q -- "$@")"
108 | local trimmed="$(tail -n $((LINES - 1)) -q <<<"$output")"
109 | if [[ "$trimmed" != "$output" ]]; then
110 | echo "--- snipped. use less or grep ---"
111 | fi
112 | command cat <<<"$trimmed"
113 | else
114 | command cat "$@"
115 | fi
116 | }
117 |
118 | function mkcd() {
119 | mkdir -p "$@" && cd "$@" || return 1
120 | }
121 |
122 | function anywait() {
123 | for pid in "$@"; do
124 | while kill -0 "$pid"; do
125 | sleep 0.5
126 | done
127 | done
128 | }
129 |
130 | function track() {
131 | date
132 | command time -v "$@"
133 | date
134 | notify "completed $*"
135 | }
136 |
137 | function mstime() {
138 | local start end elapsedms
139 | start="$(date +%s%N)"
140 | command time -v "$@"
141 | end="$(date +%s%N)"
142 | elapsedms=$(( (end - start) / 1000000 ))
143 | printf '\tTime taken: %sms\n' "$elapsedms"
144 | }
145 |
146 | function whichlink() {
147 | file="$(type -p "$1")"
148 | if [[ -z "$file" ]]; then
149 | local commandtype
150 | commandtype="$(type -t "$1")"
151 | case "$(type -t "$1")" in
152 | alias)
153 | type "$1"
154 | ;;
155 | *)
156 | echo "$1 is a $commandtype"
157 | ;;
158 | esac
159 | return
160 | fi
161 | while [[ -L "$file" ]]; do
162 | ls -AhlF "$file"
163 | nextfile="$(readlink -n "$file")"
164 | if [[ "$nextfile" = /* ]]; then
165 | file="$nextfile"
166 | else
167 | file="$(dirname "$file")/$nextfile"
168 | fi
169 | done
170 | ls -AhlF "$file"
171 | }
172 |
173 | ## version control ##
174 | #####################
175 |
176 | # some git aliases
177 | alias gg='ui_process "git gui"'
178 | alias gk='ui_process "gitk"'
179 |
180 | function loc() {
181 | local target="${1:-*}"
182 | echo " lines words chars filename"
183 | find . -type f -name "$target" -exec wc {} +
184 | }
185 |
186 | function gitstat() {
187 | git log --author="$1" --pretty=tformat: --numstat | awk '{ adds += $1; subs += $2; loc += $1 - $2 } END { printf "added: %s removed: %s total: %s\n",adds,subs,loc }' -
188 | }
189 |
190 | function git-fetch-mirror() {
191 | local remote="$1"
192 | git ls-remote "$remote" origin/* | cut -d/ -f3- | sed 's/\(.*\)/\1:\1/' | grep -v -e 'HEAD' | xargs git fetch "$remote"
193 | }
194 |
195 | alias trim_trailing_spaces='sed -i -e '"'"'s/[ \t]*$//'"'"''
196 | alias rm_conflicts="find . \( -name '*.orig' \) -delete"
197 |
198 | ## Service Development aliases ##
199 | #################################
200 |
201 | function jsoncurl() {
202 | curl -H "Accept: application/json" -H "Content-Type: application/json" "$@"
203 | echo
204 | }
205 |
206 | alias blog_serve='bundle exec jekyll serve -D -w --config _config_development.yml &'
207 | alias prc='RAILS_ENV=production RACK_ENV=production bundle exec rails console'
208 | alias myip='curl http://httpbin.org/ip 2>/dev/null | jq -r .origin'
209 | alias deploy='git push heroku && git push origin && heroku run rake db:migrate && notify deployed'
210 | alias extractip4='grep -o -E -e '"'"'[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}'"'"''
211 | alias dockerip='ip -4 addr show docker0 | extractip4'
212 | alias dockergc='docker images -f dangling=true | tail -n+2 | cut -c41-52 | xargs -I {} docker rmi {}'
213 | alias ipy='ipython'
214 |
215 | function docker-ssh-push() {
216 | docker save "$2" | bzip2 | pv | ssh "$1" 'bunzip2 | docker load'
217 | }
218 |
219 | alias json2bson='ruby -rjson -rbson -n -e "puts JSON.parse(\$_).to_bson.to_s"'
220 | alias bson2json='ruby -rjson -rbson -n -e "puts Hash.from_bson(BSON::ByteBuffer.new(\$_)).to_json"'
221 |
222 | ## Silliness ##
223 | ###############
224 | alias fucking='sudo '
225 | alias please='sudo '
226 |
227 | function xkcd() {
228 | local open_command=""
229 | # prefer xdg-open
230 | if type -f xdg-open >/dev/null 2>&1; then
231 | open_command="xdg-open"
232 | elif type -f open >/dev/null 2>&1; then
233 | open_command="open"
234 | fi
235 |
236 | case "$@" in
237 | ''|*[!0-9]*)
238 | #"$open_command" "http://www.explainxkcd.com/wiki/index.php?go=Go&title=Special%3ASearch&search=$@"
239 | "$open_command" "https://relevant-xkcd.github.io/?q=$*"
240 | ;;
241 | *)
242 | "$open_command" "https://xkcd.com/$(($1))"
243 | ;;
244 | esac
245 | }
246 |
--------------------------------------------------------------------------------
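A minimal usage sketch for sync_history, assuming the per-host history files created by .bash_lib.d/history.bash (the "steven@laptop" file name is hypothetical):

    # merge history recorded on another machine into this session,
    # then append, clear, and re-read our own history file
    sync_history ~/.bash_history.d/steven@laptop.history
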
/.bash_completion.d/adb:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | __adb_completion() {
4 | local current_word=${COMP_WORDS[COMP_CWORD]}
5 | local previous_word=${COMP_WORDS[COMP_CWORD-1]}
6 | local all_words=("${COMP_WORDS[@]}")
7 | local which_word=$COMP_CWORD
8 | if [[ $current_word == *"="* ]]; then
9 | previous_word=${current_word%=*}
10 | current_word=${current_word#*=}
11 | fi
12 |
13 | local words=""
14 | case $previous_word in
15 | -s)
16 | words=$(cache_command ~/.adb_devices~ "adb devices | tail -n +2 | cut -f 1 | tr '\n' ' '")
17 | ;;
18 | uninstall)
19 | words=$(cache_command ~/.adb_packages~ "adb shell pm list packages -3 | cut -d ":" -f 2 | tr -d '\r' | tr '\n' ' '")
20 | ;;
21 | devices|install|shell) ;;
22 | start-server) ;;
23 | *)
24 | if [[ ${current_word:0:1} == "-" ]]; then
25 | words="-d -e -s"
26 | else
27 | # display only commands
28 | words="devices install uninstall shell start-server"
29 | fi
30 | esac
31 | COMPREPLY=($(compgen -W "$words" -- $current_word))
32 | }
33 |
34 | complete -o default -o nospace -F __adb_completion adb
35 |
--------------------------------------------------------------------------------
/.bash_completion.d/consul:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This file was taken from https://github.com/hashicorp/consul/blob/master/contrib/bash-completion/_consul
4 |
5 | # This completion file has been inspired by the completion files of the Git and
6 | # the Docker projects.
7 |
8 | __consulcomp() {
9 | local all c s=$'\n' IFS=' '$'\t'$'\n'
10 | local cur="${COMP_WORDS[COMP_CWORD]}"
11 |
12 | for c in $1; do
13 | case "$c$4" in
14 | --*=*) all="$all$c$4$s" ;;
15 | *) all="$all$c$4 $s" ;;
16 | esac
17 | done
18 | IFS=$s
19 | COMPREPLY=($(compgen -P "$2" -W "$all" -- "$cur"))
20 | return
21 | }
22 |
23 | __consul_agent() {
24 | local subcommands="
25 | -advertise
26 | -advertise-wan
27 | -atlas
28 | -atlas-join
29 | -atlas-token
30 | -atlas-endpoint
31 | -bootstrap
32 | -bind
33 | -http-port
34 | -bootstrap-expect
35 | -client
36 | -config-file
37 | -config-dir
38 | -data-dir
39 | -recursor
40 | -dc
41 | -encrypt
42 | -join
43 | -join-wan
44 | -retry-join
45 | -retry-interval
46 | -retry-max-wan
47 | -log-level
48 | -node
49 | -protocol
50 | -rejoin
51 | -server
52 | -syslog
53 | -ui
54 | -ui-dir
55 | -pid-file
56 | "
57 | __consulcomp "-help $subcommands"
58 | }
59 |
60 | __consul_configtest() {
61 | local subcommands="
62 | -config-file
63 | -config-dir
64 | "
65 | __consulcomp "-help $subcommands"
66 | }
67 |
68 | __consul_event() {
69 | local subcommands="
70 | -http-addr
71 | -datacenter
72 | -name
73 | -node
74 | -service
75 | -tag
76 | -token
77 | "
78 | __consulcomp "-help $subcommands"
79 | }
80 |
81 | __consul_exec() {
82 | local subcommands="
83 | -http-addr
84 | -datacenter
85 | -prefix
86 | -node
87 | -service
88 | -tag
89 | -wait
90 | -wait-repl
91 | -token
92 | "
93 |
94 | __consulcomp "-help $subcommands"
95 | }
96 |
97 | __consul_force_leave() {
98 | __consulcomp "-help -rpc-addr"
99 | }
100 |
101 | __consul_info() {
102 | __consulcomp "-help -rpc-addr"
103 | }
104 |
105 | __consul_join() {
106 | local subcommands="
107 | -rpc-addr
108 | -wan
109 | "
110 |
111 | __consulcomp "-help $subcommands"
112 | }
113 |
114 | __consul_keygen() {
115 | # NOTE: left empty on purpose.
116 | return
117 | }
118 |
119 | __consul_keyring() {
120 | local subcommands="
121 | -install
122 | -list
123 | -remove
124 | -token
125 | -use
126 | -rpc-addr
127 | "
128 |
129 | __consulcomp "-help $subcommands"
130 | }
131 |
132 | __consul_leave() {
133 | __consulcomp "-help -rpc-addr"
134 | }
135 |
136 | __consul_lock() {
137 | local subcommands="
138 | -http-addr
139 | -n
140 | -name
141 | -token
142 | -pass-stdin
143 | -try
144 | -monitor-retry
145 | -verbose
146 | "
147 |
148 | __consulcomp "-help $subcommands"
149 | }
150 |
151 | __consul_maint() {
152 | local subcommands="
153 | -enable
154 | -disable
155 | -reason
156 | -service
157 | -token
158 | -http-addr
159 | "
160 |
161 | __consulcomp "-help $subcommands"
162 | }
163 |
164 | __consul_members() {
165 | local subcommands="
166 | -detailed
167 | -rpc-addr
168 | -status
169 | -wan
170 | "
171 |
172 | __consulcomp "-help $subcommands"
173 | }
174 |
175 | __consul_monitor() {
176 | local subcommands="
177 | -log-level
178 | -rpc-addr
179 | "
180 |
181 | __consulcomp "-help $subcommands"
182 | }
183 |
184 | __consul_reload() {
185 | __consulcomp "-help -rpc-addr"
186 | }
187 |
188 | __consul_rtt() {
189 | local subcommands="
190 | -wan
191 | -http-addr
192 | "
193 |
194 | __consulcomp "-help $subcommands"
195 | }
196 |
197 | __consul_version() {
198 | # NOTE: left empty on purpose.
199 | return
200 | }
201 |
202 | __consul_watch() {
203 | local subcommands="
204 | -http-addr
205 | -datacenter
206 | -token
207 | -key
208 | -name
209 | -passingonly
210 | -prefix
211 | -service
212 | -state
213 | -tag
214 | -type
215 | "
216 |
217 | __consulcomp "-help $subcommands"
218 | }
219 |
220 | __consul() {
221 | local c=1 command
222 | while [ $c -lt $COMP_CWORD ]; do
223 | cmd="${COMP_WORDS[c]}"
224 | case "$cmd" in
225 | -*) ;;
226 | *) command="$cmd"
227 | esac
228 | c=$((++c))
229 | done
230 |
231 | local cmds="
232 | agent
233 | configtest
234 | event
235 | exec
236 | force-leave
237 | info
238 | join
239 | keygen
240 | keyring
241 | leave
242 | lock
243 | maint
244 | members
245 | monitor
246 | reload
247 | rtt
248 | version
249 | watch
250 | "
251 |
252 | local globalflags="--help --version"
253 |
254 | # Complete a command.
255 | if [ $c -eq $COMP_CWORD -a -z "$command" ]; then
256 | case "${COMP_WORDS[COMP_CWORD]}" in
257 | -*|--*) __consulcomp "$globalflags" ;;
258 | *) __consulcomp "$cmds" ;;
259 | esac
260 | return
261 | fi
262 |
263 | # Command options.
264 | local completion_func="__consul_${command//-/_}"
265 | declare -f $completion_func >/dev/null && $completion_func && return
266 | }
267 |
268 | complete -o default -o nospace -F __consul consul
269 |
--------------------------------------------------------------------------------
/.bash_completion.d/emulator:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | __android_emulator_completion() {
4 | local words=""
5 | if ((COMP_CWORD == 1)); then
6 | # display only commands
7 | words="-avd -help"
8 | else
9 | case ${COMP_WORDS[1]} in
10 | -avd)
11 | words=$(cache_command ~/.android_emulators~ "android list avd | grep -E -o 'Name: \S+' | cut -d ' ' -f 2")
12 | ;;
13 | esac
14 | fi
15 | COMPREPLY=($(compgen -W "$words" -- ${COMP_WORDS[COMP_CWORD]}))
16 | }
17 |
18 | complete -o default -o nospace -F __android_emulator_completion emulator
19 |
--------------------------------------------------------------------------------
/.bash_completion.d/rake:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Bash completion support for Rake, Ruby Make.
3 |
4 | _rakecomplete() {
5 | local cur
6 | _get_comp_words_by_ref -n : cur
7 |
8 | if [ -f Rakefile ]; then
9 | recent=`ls -t .rake_tasks~ Rakefile *.rake **/*.rake 2> /dev/null | head -n 1`
10 | if [[ $recent != '.rake_tasks~' ]]; then
11 | rake -P | grep 'rake' | cut -d " " -f 2 > .rake_tasks~
12 | fi
13 | COMPREPLY=($(compgen -W "`cat .rake_tasks~`" -- $cur))
14 | __ltrim_colon_completions "$cur"
15 |
16 | return 0
17 | fi
18 | }
19 |
20 | complete -o default -o nospace -F _rakecomplete rake
21 |
--------------------------------------------------------------------------------
/.bash_completion.d/ssh-ca:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | __sshca_completion() {
4 | local current_word=${COMP_WORDS[COMP_CWORD]}
5 | local previous_word=${COMP_WORDS[COMP_CWORD-1]}
6 | local all_words=("${COMP_WORDS[@]}")
7 | local which_word=$COMP_CWORD
8 | if [[ $current_word == *"="* ]]; then
9 | previous_word=${current_word%=*}
10 | current_word=${current_word#*=}
11 | fi
12 |
13 | local words=""
14 | if (($which_word == 1)); then
15 | # display only commands
16 | words="help setup install revoke sign signhost trustconfig selfdestruct uninstall implode"
17 | else
18 | case ${COMP_WORDS[1]} in
19 | help)
20 | ;;
21 | setup)
22 | #TODO: complete a path
23 | ;;
24 | install)
25 | #TODO: complete ssh hosts/servers
26 | ;;
27 | revoke)
28 | ;;
29 | sign)
30 | #TODO: complete paths, then options for ssh-keygen
31 | ;;
32 | signhost)
33 | #TODO: complete paths/ssh hosts/servers, then options for ssh-keygen
34 | ;;
35 | trustconfig)
36 | ;;
37 | selfdestruct|uninstall|implode)
38 | ;;
39 | esac
40 | fi
41 | COMPREPLY=($(compgen -W "$words" -- $current_word))
42 | }
43 |
44 | complete -o default -o nospace -F __sshca_completion ssh-ca
45 |
--------------------------------------------------------------------------------
/.bash_completion.d/svn:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | __svn_complete_config_option() {
4 | # TODO: split the current word into key/value
5 | "server:global:http-library=serf"
6 | }
7 |
8 | __svn_complete_global_options() {
9 | echo "--username --password --no-auth-cache --non-interactive --trust-server-cert --config-dir --config-option"
10 | }
11 |
12 | __svn_completion() {
13 | local current_word=${COMP_WORDS[COMP_CWORD]}
14 | local previous_word=${COMP_WORDS[COMP_CWORD-1]}
15 | local all_words=("${COMP_WORDS[@]}")
16 | local which_word=$COMP_CWORD
17 | if [[ $current_word == *"="* ]]; then
18 | previous_word=${current_word%=*}
19 | current_word=${current_word#*=}
20 | fi
21 |
22 | local words=""
23 | case $previous_word in
24 | checkout|commit) ;;
25 | cat|changelist|cleanup|export|import) ;;
26 | info|list|lock|log|merge|unlock) ;;
27 | mergeinfo|propdel|propedit|propget|proplist|propset);;
28 | resolve|resolved|revert|status|switch|update) ;;
29 | rename|ren|add|copy|delete|move|mkdir) ;;
30 | cl|co|ci|cp|del|remove|rm|di|ls|h|mv) ;;
31 | pdel|pd|pedit|pe|pget|pg|plist|pl|pset|ps|stat|st|sw|up) ;;
32 |
33 | blame)
34 | if [[ ${current_word:0:1} == "-" ]]; then
35 | words="$(__svn_complete_global_options) -r --revision -v --verbose -g --use-merge-history --incremental --xml -x --extensions --force"
36 | fi
37 | ;;
38 | --config-dir) ;; # should complete to a directory
39 | --config-option) words="$(__svn_complete_config_option)";;
40 | *)
41 | if [[ ${current_word:0:1} == "-" ]]; then
42 | words="--username --password --no-auth-cache --non-interactive --trust-server-cert --config-dir --config-option"
43 | else
44 | # display only commands
45 | words="add blame cat changelist checkout cleanup commit copy delete diff export help import info list lock log merge"
46 | words="$words mergeinfo mkdir move propdel propedit propget proplist propset resolve resolved revert status switch unlock update"
47 | words="$words praise annotate ann"
48 | words="$words cl co ci cp del remove rm di ls h mv rename ren pdel pd pedit pe pget pg plist pl pset ps stat st sw up"
49 | fi
50 | esac
51 | COMPREPLY=($(compgen -W "$words" -- $current_word))
52 | }
53 |
54 | complete -o default -o nospace -F __svn_completion svn
55 |
--------------------------------------------------------------------------------
/.bash_completion.d/terraform:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Bash Terraform completion
4 | # Adapted from https://github.com/Bash-it/bash-it/blob/master/completion/available/terraform.completion.bash
5 |
6 | _terraform () {
7 | local cmds cur colonprefixes colonsuffixes
8 | cmds="apply destroy fmt get graph import init \
9 | output plan push refresh remote show taint \
10 | untaint validate version state"
11 |
12 | COMPREPLY=()
13 | cur=${COMP_WORDS[COMP_CWORD]}
14 | # Work-around bash_completion issue where bash interprets a colon
15 | # as a separator.
16 | # Work-around borrowed from the darcs work-around for the same
17 | # issue.
18 | colonsuffixes="${cur##*:}"
19 | colonprefixes="${cur%"$colonsuffixes"}"
20 | COMPREPLY=( $(compgen -W "$cmds" -- "$cur") )
21 | local i=${#COMPREPLY[*]}
22 | while [ $((--i)) -ge 0 ]; do
23 | COMPREPLY[$i]=${COMPREPLY[$i]#"$colonprefixes"}
24 | done
25 |
26 | return 0
27 | } && complete -F _terraform terraform
28 |
--------------------------------------------------------------------------------
/.bash_completion.d/tmux:
--------------------------------------------------------------------------------
1 | # vim ft=sh
2 | # START tmux completion
3 | # This file is in the public domain
4 | # See: http://www.debian-administration.org/articles/317 for how to write more.
5 | # Usage: Put "source bash_completion_tmux.sh" into your .bashrc
6 | _tmux()
7 | {
8 | local cur prev words cword;
9 | _init_completion || return;
10 | if [[ $cword -eq 1 ]]; then
11 | COMPREPLY=($( compgen -W "$(tmux list-commands | cut -d' ' -f1)" -- "$cur" ));
12 | return 0
13 | fi
14 | }
15 | complete -F _tmux tmux
16 |
17 | # END tmux completion
18 |
19 |
--------------------------------------------------------------------------------
/.bash_completion.d/triage:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | __triage_completion() {
4 | local current_word=${COMP_WORDS[COMP_CWORD]}
5 | local previous_word=${COMP_WORDS[COMP_CWORD-1]}
6 | local all_words=("${COMP_WORDS[@]}")
7 | local which_word=$COMP_CWORD
8 | if [[ $current_word == *"="* ]]; then
9 | previous_word=${current_word%=*}
10 | current_word=${current_word#*=}
11 | fi
12 |
13 | local words=""
14 | if (($which_word == 1)); then
15 | # display only commands
16 | words="help what defer all add log resolve"
17 | else
18 | case ${COMP_WORDS[1]} in
19 | help)
20 | words="help what defer all add log resolve"
21 | ;;
22 | what)
23 | ;;
24 | defer)
25 | ;;
26 | all)
27 | ;;
28 | add)
29 | ;;
30 | log)
31 | ;;
32 | resolve)
33 | ;;
34 | esac
35 | fi
36 | COMPREPLY=($(compgen -W "$words" -- $current_word))
37 | }
38 |
39 | complete -o default -o nospace -F __triage_completion triage
40 |
--------------------------------------------------------------------------------
/.bash_completion.d/vault:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # ---------------------------------------------------------------------------
4 | # vault-bash-completion
5 | #
6 | # This adds bash completions for [HashiCorp Vault](https://www.vaultproject.io/)
7 | #
8 | # Based on https://github.com/iljaweis/vault-bash-completion
9 | # ---------------------------------------------------------------------------
10 |
11 | function _vault_mounts() {
12 | (
13 | set -euo pipefail
14 | if ! vault mounts 2> /dev/null | awk 'NR > 1 {print $1}'; then
15 | echo "secret"
16 | fi
17 | )
18 | }
19 |
20 | function _vault() {
21 | local policies cur prev VAULT_COMMANDS path
22 |
23 | VAULT_COMMANDS=$(vault 2>&1 | grep -E '^ +' | awk '{print $1}')
24 |
25 | if [ "$COMP_CWORD" -gt 0 ]; then
26 | cur=${COMP_WORDS[COMP_CWORD]}
27 | prev=${COMP_WORDS[COMP_CWORD-1]}
28 | fi
29 |
30 | local line=${COMP_LINE}
31 |
32 | if [[ $prev =~ ^(policies|policy-write|policy-delete) ]]; then
33 | policies=$(vault policies 2> /dev/null)
34 | COMPREPLY=($(compgen -W "$policies" -- "$cur"))
35 | elif [ "$(echo "$line" | wc -w)" -le 2 ]; then
36 | if [[ "$line" =~ ^vault\ (read|write|delete|list)\ $ ]]; then
37 | COMPREPLY=($(compgen -W "$(_vault_mounts)" -- ''))
38 | else
39 | COMPREPLY=($(compgen -W "$VAULT_COMMANDS" -- "$cur"))
40 | fi
41 | elif [[ "$line" =~ ^vault\ (read|write|delete|list)\ (.*)$ ]]; then
42 | path=${BASH_REMATCH[2]}
43 | if [[ "$path" =~ ^([^ ]+)/([^ /]*)$ ]]; then
44 | list=$(vault list -format=yaml "${BASH_REMATCH[1]}" 2> /dev/null | awk '{ print $2 }')
45 | COMPREPLY=($(compgen -W "$list" -P "${BASH_REMATCH[1]}/" -- "${BASH_REMATCH[2]}"))
46 | else
47 | COMPREPLY=($(compgen -W "$(_vault_mounts)" -- "$path"))
48 | fi
49 | fi
50 | }
51 |
52 | complete -o default -o nospace -F _vault vault
53 |
--------------------------------------------------------------------------------
/.bash_lib.d/chdir_hook.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Change directory hook
4 | #
5 | # When the directory is changed, this function will run the CHDIR_COMMAND environment variable
6 | # You should assume that whatever is already in this command is fully delimited, including semicolons
7 | function on_chdir() {
8 | if [[ "$PWD" != "$ONCHDIR_OLDPWD" ]]; then
9 | eval "$CHDIR_COMMAND"
10 | ONCHDIR_OLDPWD="$PWD"
11 | fi
12 | }
13 |
14 | if [[ ! "$PROMPT_COMMAND" == *"on_chdir"* ]]; then
15 | PROMPT_COMMAND="on_chdir${PROMPT_COMMAND:+;$PROMPT_COMMAND}"
16 | fi
17 |
--------------------------------------------------------------------------------
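A minimal sketch of registering a hook with this mechanism; my_on_chdir_hook is a hypothetical function, but the prepend-if-absent pattern is the same one used by .bash_lib.d/direnv.bash and .bash_prompt.d/project-prompt.bash:

    # run my_on_chdir_hook whenever the working directory changes,
    # preserving whatever is already registered in CHDIR_COMMAND
    if [[ ! "$CHDIR_COMMAND" == *"my_on_chdir_hook"* ]]; then
        CHDIR_COMMAND="my_on_chdir_hook${CHDIR_COMMAND:+;$CHDIR_COMMAND}"
    fi
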
/.bash_lib.d/colors.bash:
--------------------------------------------------------------------------------
1 | # This file is meant to determine color display capabilities and optionally set up some handy shortcuts
2 |
3 | # set a fancy prompt (non-color, unless we know we "want" color)
4 | case "$TERM" in
5 | *-color) color_prompt=yes;;
6 | esac
7 |
8 | if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
9 | # We have color support; assume it's compliant with Ecma-48
10 | # (ISO/IEC-6429). (Lack of such support is extremely rare, and such
11 | # a case would tend to support setf rather than setaf.)
12 | color_prompt=yes
13 | else
14 | color_prompt=
15 | fi
16 |
--------------------------------------------------------------------------------
/.bash_lib.d/direnv.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Loads environment variables from all the .env files from the PWD to the root
4 | #
5 | # Intended for use as a way to push environment variables into files that are scoped
6 | # to a project. Note that if you overwrite a variable, it will not be unset.
7 |
8 | function _direnv_load_envfile() {
9 | local envfile="$1"
10 | local key
11 | local value
12 | # read in key value pairs, ignoring comments
13 |
14 | while IFS="=" read -r key value; do
15 | case "$key" in
16 | "#*") ;;
17 | *) eval "export $key='$value'" ;;
18 | esac
19 | done < "$envfile"
20 | }
21 |
22 | function _direnv_unload_envfile() {
23 | local envfile="$1"
24 | local key
25 | local value
26 | # read in key value pairs, ignoring comments
27 |
28 | while IFS="=" read -r key value; do
29 | case "$key" in
30 | "#*") ;;
31 | *) [[ "${!key}" == "$value" ]] && eval "unset -v $key" ;;
32 | esac
33 | done < "$envfile"
34 | }
35 |
36 | function _direnv_do_with_envfiles() {
37 | local curdir="$1"
38 | shift
39 | local parentdir
40 | local -a files=()
41 | while :; do
42 | parentdir="$(dirname "$curdir")"
43 | if [[ -f "$curdir"/.env ]]; then
44 | files+=("$curdir"/.env)
45 | fi
46 | [[ "$parentdir" == "$curdir" ]] && break
47 | curdir="$parentdir"
48 | done
49 | local idx
50 | for (( idx=${#files[@]}-1 ; idx>=0 ; idx-- )) ; do
51 | "$@" "${files[idx]}"
52 | done
53 | }
54 |
55 | function _direnv_on_chdir() {
56 | _direnv_do_with_envfiles "$ONCHDIR_OLDPWD" _direnv_unload_envfile
57 | _direnv_do_with_envfiles "$PWD" _direnv_load_envfile
58 | }
59 |
60 | if [[ "$BASHFEATURE_DIRENV_ENABLED" == "true" && ! "$CHDIR_COMMAND" == *"_direnv_on_chdir"* ]]; then
61 | CHDIR_COMMAND="_direnv_on_chdir${CHDIR_COMMAND:+;$CHDIR_COMMAND}"
62 | _direnv_on_chdir
63 | fi
64 |
--------------------------------------------------------------------------------
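A minimal sketch of wiring this up (the project path and values are illustrative): the feature is gated behind BASHFEATURE_DIRENV_ENABLED, which .bashlib reads from ~/.bash_features before loading this library.

    # ~/.bash_features
    BASHFEATURE_DIRENV_ENABLED=true

    # ~/projects/example/.env (loaded when you cd into the project, unloaded when you leave)
    # comment lines are skipped
    DATABASE_URL=postgres://localhost/example_dev
    RACK_ENV=development
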
/.bash_lib.d/git-command-hooks.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # git-command-hooks -- a wrapper for git that allows hooks
4 | #
5 | # Note: adding the --force flag to the command
6 | # causes the precommit hook to be skipped.
7 | #
8 | # Version: 0.1.0
9 | #
10 | # Bash-only variant of git-command-hooks, intended to avoid invoking extra processes.
11 | # Especially since git is invoked several times in building a prompt,
12 | # this can cause unnecessary delays when working with a system under extreme load.
13 | #
14 | # Based on: https://github.com/rkitover/git-svn-hooks
15 | # Based on: https://raw.github.com/hkjels/.dotfiles/master/zsh/git-svn.zsh
16 | #
17 | # Author: stevenkaras: Steven Karas (steven.karas@gmail.com)
18 | #
19 | # repo: https://github.com/stevenkaras/bashfiles
20 |
21 | function git() {
22 | local _gitdir
23 | _gitdir="$(command git rev-parse --git-dir 2>/dev/null)"
24 |
25 | # Expand git aliases
26 | local _param_1="$1"
27 | local _expanded
28 | _expanded="$(command git config --get-regexp "^alias.${_param_1}\$" | sed -e 's/[^ ]* //')"
29 |
30 | local _exit_val
31 |
32 | case "$_expanded" in
33 | \!*)
34 | # check for !shell-command aliases
35 | _expanded="${_expanded:1}"
36 | shift
37 | eval "$_expanded \"\$@\""
38 | _exit_val=$?
39 | return $_exit_val
40 | ;;
41 | *)
42 | # expand aliases
43 | if [[ -n "$_expanded" ]]; then
44 | shift
45 | eval "git $_expanded \"\$@\""
46 | _exit_val=$?
47 | return $_exit_val
48 | fi
49 | ;;
50 | esac
51 |
52 | # Pre hooks
53 | if [[ -x "$_gitdir/hooks/pre-command-$1" ]]; then
54 | if ! "$_gitdir/hooks/pre-command-$1" "${@:2}"; then
55 | _exit_val=$?
56 | return $_exit_val
57 | fi
58 | fi
59 |
60 | # call git
61 | command git "$@"
62 | _exit_val=$?
63 |
64 | # Post hooks
65 | if [[ -x "$_gitdir/hooks/post-command-$1" ]]; then
66 | if ! "$_gitdir/hooks/post-command-$1" "${@:2}"; then
67 | _exit_val=$?
68 | return $_exit_val
69 | fi
70 | fi
71 |
72 | return $_exit_val
73 | }
74 |
75 | # Copyright (c) 2019, Steven Karas
76 | # Portions copyright (c) 2013, Rafael Kitover
77 | # All rights reserved.
78 | #
79 | # Redistribution and use in source and binary forms, with or without
80 | # modification, are permitted provided that the following conditions are met:
81 | #
82 | # Redistributions of source code must retain the above copyright notice, this
83 | # list of conditions and the following disclaimer.
84 | #
85 | # Redistributions in binary form must reproduce the above copyright notice, this
86 | # list of conditions and the following disclaimer in the documentation and/or
87 | # other materials provided with the distribution.
88 | #
89 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
90 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
91 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
92 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
93 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
94 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
95 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
96 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
97 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
98 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
99 |
--------------------------------------------------------------------------------
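For illustration, a minimal pre-command hook this wrapper would pick up; the body is a sketch (the githooks/ directory in this repo ships real ones, such as pre-command-push.auto_pull). Save it as .git/hooks/pre-command-push and mark it executable:

    #!/usr/bin/env bash
    # receives the arguments that were passed to "git push"
    echo "about to push: $*"
    exit 0  # exiting non-zero prevents the wrapped git command from running
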
/.bash_lib.d/history.bash:
--------------------------------------------------------------------------------
1 |
2 | ## set up the bash history ##
3 | #############################
4 | # don't put duplicate lines in the history. See bash(1) for more options
5 | # ... or force ignoredups and ignorespace
6 | HISTCONTROL=ignorespace
7 |
8 | # append to the history file, don't overwrite it
9 | shopt -s histappend
10 | shopt -s cmdhist
11 |
12 | # for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
13 | HISTSIZE=
14 | HISTFILESIZE=
15 | HISTTIMEFORMAT="[%F %T] "
16 | if [[ ! -d "$HOME/.bash_history.d" ]]; then
17 | mkdir "$HOME/.bash_history.d"
18 | fi
19 | HISTFILE="$HOME/.bash_history.d/$USER@$HOSTNAME.history"
20 | HISTIGNORE="ll:ls:bg:fg:pwd:date"
21 |
22 | if [[ ! "$PROMPT_COMMAND" == *"history -a"* ]]; then
23 | PROMPT_COMMAND="${PROMPT_COMMAND:+$PROMPT_COMMAND; }history -a; history -c; history -r"
24 | fi
25 |
--------------------------------------------------------------------------------
/.bash_lib.d/is_remote.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function _current_tty() {
4 | # tmux confounds this, since it's a multiplexer. We need to find the client tty with the latest activity
5 | if [[ -n "$TMUX" ]]; then
6 | tmux list-clients -F '#{client_activity}:#{client_tty}' | sort -n | tail -n 1 | cut -d ':' -f 2
7 | return 0
8 | fi
9 |
10 | tty
11 | return 0
12 | }
13 |
14 | function _is_tty_remote() {
15 | # check if the given tty is connected to a remote user
16 | local tty_name="${1##/dev/}"
17 | local from
18 | from="$(w -h | tr -s ' ' 2>/dev/null | cut -d ' ' -f2-3 | grep -e "^$tty_name " | cut -d ' ' -f 2)"
19 | if [[ -z "$from" ]]; then
20 | # attempt to search for suffixes
21 | local extra_chomped="${tty_name##tty}"
22 | if [[ "$extra_chomped" != "$tty_name" ]]; then
23 | _is_tty_remote "$extra_chomped"
24 | local returnval=$?
25 | return $returnval
26 | fi
27 | # this can happen if we've su'd inside tmux
28 | # but it can also happen if we're running on a system with systemd, so assume we're local
29 | return 1
30 | elif [[ "$from" =~ :[[:digit:]]* ]]; then
31 | return 1
32 | elif [[ "$from" == "-" ]]; then
33 | return 1
34 | else
35 | return 0
36 | fi
37 | }
38 |
39 | function _proot() {
40 | # Based on a recursive version that depends on procfs I found on StackOverflow
41 | local pid="$$"
42 | local name=""
43 |
44 | while [[ "$pid" -ge 1 ]]; do
45 | read -r pid name < <(ps -o ppid= -o comm= -p "$pid")
46 | if [[ "$name" == "${1:-sshd}"* ]]; then
47 | return 0
48 | fi
49 | done
50 |
51 | return 1
52 | }
53 |
54 | # check if the current BASH session is a "remote" session
55 | function is_remote() {
56 | # check if we're running ubuntu-server, if so, always consider it to be "remote"
57 | if type -t dpkg >/dev/null 2>&1; then
58 | if ! dpkg -s ubuntu-desktop >/dev/null 2>&1; then
59 | return 0
60 | fi
61 | fi
62 |
63 | # determine if the tty is associated with a remote connection
64 | local tty
65 | tty="$(_current_tty)"
66 | if _is_tty_remote "$tty"; then
67 | return 0
68 | fi
69 |
70 | return 1
71 | }
72 |
--------------------------------------------------------------------------------
/.bash_lib.d/toggle_remote.bash:
--------------------------------------------------------------------------------
1 | # shellcheck shell=bash
2 | # I have a workflow where sometimes I work on my laptop, and sometimes I ssh into it from home.
3 | # This leads to environment issues where I'd prefer to use a different editor based on my current workflow.
4 | # Normally, I would just detect if the bash session is inside SSH, but tmux confounds this.
5 | # So I'm building a set of functions to setup a local/remote environment
6 |
7 | function toggle_remote() {
8 | function _setup_local() {
9 | # wild guess, but good enough for most uses
10 | export DISPLAY=:0
11 | export VISUAL="subl -w"
12 | }
13 |
14 | function _setup_remote() {
15 | # wild guess, but good enough for most uses. not portable...
16 | # test is nonportable, and makes rough assumption about port used by ssh for X forwarding, so avoid it for now.
17 | # netstat -lnt 2>/dev/null | grep ':6010' >/dev/null && export DISPLAY=localhost:10.0
18 | export DISPLAY=localhost:10.0
19 | export VISUAL="nano"
20 | }
21 |
22 | local desired=""
23 | if [[ $# -eq 1 ]]; then
24 | desired="$1"
25 | else
26 | if is_remote; then
27 | desired="remote"
28 | else
29 | desired="local"
30 | fi
31 | fi
32 | if [[ "$desired" == "remote" ]]; then
33 | _setup_remote
34 | elif [[ "$desired" == "local" ]]; then
35 | _setup_local
36 | else
37 | echo "USAGE: toggle_remote [local|remote]"
38 | fi
39 |
40 | unset -f _setup_local
41 | unset -f _setup_remote
42 | }
43 |
--------------------------------------------------------------------------------
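Usage sketch of the two profiles defined above:

    toggle_remote remote   # force the remote profile: VISUAL=nano, DISPLAY=localhost:10.0
    toggle_remote local    # force the local profile: VISUAL="subl -w", DISPLAY=:0
    toggle_remote          # choose automatically based on is_remote
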
/.bash_lib.d/utility_functions.bash:
--------------------------------------------------------------------------------
1 |
2 | # expands the path of the given parameter
3 | function expand_path {
4 | if [[ -e "$1" ]]; then
5 | if [[ "$1" == "." ]]; then
6 | echo "$PWD"
7 | elif [[ "$(basename "$1")" == ".." ]]; then
8 | pushd "$1" >/dev/null
9 | echo "$PWD"
10 | popd >/dev/null
11 | else
12 | pushd "$(dirname "$1")" >/dev/null
13 | echo "$PWD/$(basename "$1")"
14 | popd >/dev/null
15 | fi
16 | else
17 | echo "$(expand_path "$(dirname "$1")")/$(basename "$1")"
18 | fi
19 | }
20 |
21 | # expands the path of any non-options in the given args
22 | function expand_args {
23 | ARGS=""
24 | for arg in "$@"; do
25 | if [[ "-" == "${arg:0:1}" ]]; then
26 | ARG=$arg
27 | else
28 | ARG=$(expand_path "$arg")
29 | fi
30 | ARGS="$ARGS $ARG"
31 | done
32 | echo "$ARGS"
33 | }
34 |
35 | # Cache an expensive, volatile command, caching the result for the specified number of seconds
36 | #
37 | # Arguments:
38 | # 1 - the path to the cache file
39 | # 2 - the command to generate the words for the cache
40 | # 3 - number of seconds to cache the results for (defaults to 10 seconds)
41 | function cache_command() {
42 | local gen_cache=
43 | local stat_options=
44 | local cache_period=${3:-10}
45 | case $(uname) in
46 | Darwin*) stat_options="-f%m" ;;
47 | *) stat_options="-c%Y" ;;
48 | esac
49 | if [[ ! -r "$1" ]]; then
50 | gen_cache=true
51 | elif (($(date +%s) - $(stat $stat_options "$1") > cache_period)); then
52 | gen_cache=true
53 | fi
54 | if [[ $gen_cache ]]; then
55 | eval "$2" > "$1"
56 | fi
57 | cat "$1"
58 | }
59 |
--------------------------------------------------------------------------------
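For illustration, this is based on how the completion scripts in .bash_completion.d call cache_command; the explicit 30-second cache period is added here as an example (the scripts themselves rely on the 10-second default):

    # cache the adb device list so TAB completion doesn't shell out to adb on every keypress
    devices=$(cache_command ~/.adb_devices~ "adb devices | tail -n +2 | cut -f 1" 30)
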
/.bash_lib.d/window_commands.bash:
--------------------------------------------------------------------------------
1 |
2 | # Makes a best effort to set the title for the current terminal container
3 | #
4 | # This could be a window, tab, etc.
5 | #
6 | # Arguments:
7 | # 1 - the title to set
8 | function set_title() {
9 | case $TERM in
10 | screen*|tmux*)
11 | printf "\033k%s\033\\" "$1"
12 | ;;
13 | xterm*)
14 | printf "\033]0;%s\a" "$1"
15 | ;;
16 | *)
17 | ;;
18 | esac
19 | }
20 |
--------------------------------------------------------------------------------
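A usage sketch, mirroring how project_set_title in .bash_prompt calls it (the title text is arbitrary):

    set_title "$HOSTNAME:$PROJECT_NAME"   # renames the current tmux window or xterm tab
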
/.bash_prompt:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ## include all the prompt libraries ##
4 | ######################################
5 | for prompt_lib in "$HOME/.bash_prompt.d/"*; do
6 | source "$prompt_lib"
7 | done
8 |
9 | ## build the actual prompt ##
10 | #############################
11 | # NOTE: the remote host is calculated at shell startup, and depends on the USER/HOSTNAME envvars
12 | PROMPT_CHUNK_SIMPLE="\[\e[${PREFERRED_COLOR:-33}m\]$(__remote_host)"
13 |
14 | # add git to the prompt
15 | GIT_PS1_SHOWDIRTYSTATE=true # shellcheck disable=SC2034
16 | GIT_PS1_SHOWSTASHSTATE=true # shellcheck disable=SC2034
17 | # GIT_PS1_SHOWUPSTREAM="false"
18 | GIT_PS1_SHOWUNTRACKEDFILES=true # shellcheck disable=SC2034
19 | GIT_PS1_SHOWBRANCH=true # shellcheck disable=SC2034
20 | PROMPT_CHUNK_GIT="\[\e[91m\]\$(__git_ps1 ' (%s) ')"
21 |
22 | # add svn to the prompt
23 | # SVN_PS1_SHOWDIRTYSTATE=
24 | # SVN_PS1_SHOWREVISION=
25 | # PS1="${PS1}\[\e[91m\]\$(__svn_ps1 ' (%s) ')"
26 |
27 | PS1="$PROMPT_CHUNK_SIMPLE\$(project_ps1)$PROMPT_CHUNK_GIT\[\e[${PREFERRED_COLOR:-33}m\]\$ \[\e[m\]"
28 |
29 | # helper function to fall back on a simpler prompt when git is slow #
30 | ##############################################################################
31 | function fallback_prompt() {
32 | PS1="$PROMPT_CHUNK_SIMPLE\$(project_ps1)\[\e[${PREFERRED_COLOR:-33}m\]\$ \[\e[m\]"
33 | }
34 |
35 | # Change the terminal title when switching directories #
36 | ########################################################
37 | function project_set_title() {
38 | local title="$PROJECT_NAME"
39 | if is_remote; then
40 | title="$HOSTNAME${title:+:$title}"
41 | fi
42 | set_title "$title"
43 | }
44 | # CHDIR_COMMAND="${CHDIR_COMMAND}project_set_title;"
45 |
46 | unset color_prompt force_color_prompt
47 |
--------------------------------------------------------------------------------
/.bash_prompt.d/git-prompt.bash:
--------------------------------------------------------------------------------
1 | #sh/zsh git prompt support
2 | #
3 | # Copyright (C) 2006,2007 Shawn O. Pearce
4 | # Distributed under the GNU General Public License, version 2.0.
5 | #
6 | # This script allows you to see the current branch in your prompt.
7 | #
8 | # To enable:
9 | #
10 | # 1) Copy this file to somewhere (e.g. ~/.git-prompt.sh).
11 | # 2) Add the following line to your .bashrc/.zshrc:
12 | # source ~/.git-prompt.sh
13 | # 3) Change your PS1 to also show the current branch:
14 | # Bash: PS1='[\u@\h \W$(__git_ps1 " (%s)")]\$ '
15 | # ZSH: PS1='[%n@%m %c$(__git_ps1 " (%s)")]\$ '
16 | #
17 | # The argument to __git_ps1 will be displayed only if you are currently
18 | # in a git repository. The %s token will be the name of the current
19 | # branch.
20 | #
21 | # In addition, if you set GIT_PS1_SHOWDIRTYSTATE to a nonempty value,
22 | # unstaged (*) and staged (+) changes will be shown next to the branch
23 | # name. You can configure this per-repository with the
24 | # bash.showDirtyState variable, which defaults to true once
25 | # GIT_PS1_SHOWDIRTYSTATE is enabled.
26 | #
27 | # You can also see if currently something is stashed, by setting
28 | # GIT_PS1_SHOWSTASHSTATE to a nonempty value. If something is stashed,
29 | # then a '$' will be shown next to the branch name.
30 | #
31 | # If you would like to see if there're untracked files, then you can set
32 | # GIT_PS1_SHOWUNTRACKEDFILES to a nonempty value. If there're untracked
33 | # files, then a '%' will be shown next to the branch name.
34 | #
35 | # If you would like to see the difference between HEAD and its upstream,
36 | # set GIT_PS1_SHOWUPSTREAM="auto". A "<" indicates you are behind, ">"
37 | # indicates you are ahead, and "<>" indicates you have diverged. You
38 | # can further control behaviour by setting GIT_PS1_SHOWUPSTREAM to a
39 | # space-separated list of values:
40 | #
41 | # verbose show number of commits ahead/behind (+/-) upstream
42 | # legacy don't use the '--count' option available in recent
43 | # versions of git-rev-list
44 | # git always compare HEAD to @{upstream}
45 | # svn always compare HEAD to your SVN upstream
46 | #
47 | # By default, __git_ps1 will compare HEAD to your SVN upstream if it can
48 | # find one, or @{upstream} otherwise. Once you have set
49 | # GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
50 | # setting the bash.showUpstream config variable.
51 |
52 | # __gitdir accepts 0 or 1 arguments (i.e., location)
53 | # returns location of .git repo
54 | __gitdir ()
55 | {
56 | # Note: this function is duplicated in git-completion.bash
57 | # When updating it, make sure you update the other one to match.
58 | if [ -z "${1-}" ]; then
59 | if [ -n "${__git_dir-}" ]; then
60 | echo "$__git_dir"
61 | elif [ -n "${GIT_DIR-}" ]; then
62 | test -d "${GIT_DIR-}" || return 1
63 | echo "$GIT_DIR"
64 | elif [ -d .git ]; then
65 | echo .git
66 | else
67 | git rev-parse --git-dir 2>/dev/null
68 | fi
69 | elif [ -d "$1/.git" ]; then
70 | echo "$1/.git"
71 | else
72 | echo "$1"
73 | fi
74 | }
75 |
76 | # stores the divergence from upstream in $p
77 | # used by GIT_PS1_SHOWUPSTREAM
78 | __git_ps1_show_upstream ()
79 | {
80 | local key value
81 | local svn_remote svn_url_pattern count n
82 | local upstream=git legacy="" verbose=""
83 |
84 | svn_remote=()
85 | # get some config options from git-config
86 | local output="$(git config -z --get-regexp '^(svn-remote\..*\.url|bash\.showupstream)$' 2>/dev/null | tr '\0\n' '\n ')"
87 | while read -r key value; do
88 | case "$key" in
89 | bash.showupstream)
90 | GIT_PS1_SHOWUPSTREAM="$value"
91 | if [[ -z "${GIT_PS1_SHOWUPSTREAM}" ]]; then
92 | p=""
93 | return
94 | fi
95 | ;;
96 | svn-remote.*.url)
97 | svn_remote[ $((${#svn_remote[@]} + 1)) ]="$value"
98 | svn_url_pattern+="\\|$value"
99 | upstream=svn+git # default upstream is SVN if available, else git
100 | ;;
101 | esac
102 | done <<< "$output"
103 |
104 | # parse configuration values
105 | for option in ${GIT_PS1_SHOWUPSTREAM}; do
106 | case "$option" in
107 | git|svn) upstream="$option" ;;
108 | verbose) verbose=1 ;;
109 | legacy) legacy=1 ;;
110 | esac
111 | done
112 |
113 | # Find our upstream
114 | case "$upstream" in
115 | git) upstream="@{upstream}" ;;
116 | svn*)
117 | # get the upstream from the "git-svn-id: ..." in a commit message
118 | # (git-svn uses essentially the same procedure internally)
119 | local svn_upstream=($(git log --first-parent -1 \
120 | --grep="^git-svn-id: \(${svn_url_pattern#??}\)" 2>/dev/null))
121 | if [[ 0 -ne ${#svn_upstream[@]} ]]; then
122 | svn_upstream=${svn_upstream[ ${#svn_upstream[@]} - 2 ]}
123 | svn_upstream=${svn_upstream%@*}
124 | local n_stop="${#svn_remote[@]}"
125 | for ((n=1; n <= n_stop; n++)); do
126 | svn_upstream=${svn_upstream#${svn_remote[$n]}}
127 | done
128 |
129 | if [[ -z "$svn_upstream" ]]; then
130 | # default branch name for checkouts with no layout:
131 | upstream=${GIT_SVN_ID:-git-svn}
132 | else
133 | upstream=${svn_upstream#/}
134 | fi
135 | elif [[ "svn+git" = "$upstream" ]]; then
136 | upstream="@{upstream}"
137 | fi
138 | ;;
139 | esac
140 |
141 | # Find how many commits we are ahead/behind our upstream
142 | if [[ -z "$legacy" ]]; then
143 | count="$(git rev-list --count --left-right \
144 | "$upstream"...HEAD 2>/dev/null)"
145 | else
146 | # produce equivalent output to --count for older versions of git
147 | local commits
148 | if commits="$(git rev-list --left-right "$upstream"...HEAD 2>/dev/null)"
149 | then
150 | local commit behind=0 ahead=0
151 | for commit in $commits
152 | do
153 | case "$commit" in
154 | "<"*) ((behind++)) ;;
155 | *) ((ahead++)) ;;
156 | esac
157 | done
158 | count="$behind $ahead"
159 | else
160 | count=""
161 | fi
162 | fi
163 |
164 | # calculate the result
165 | if [[ -z "$verbose" ]]; then
166 | case "$count" in
167 | "") # no upstream
168 | p="" ;;
169 | "0 0") # equal to upstream
170 | p="=" ;;
171 | "0 "*) # ahead of upstream
172 | p=">" ;;
173 | *" 0") # behind upstream
174 | p="<" ;;
175 | *) # diverged from upstream
176 | p="<>" ;;
177 | esac
178 | else
179 | case "$count" in
180 | "") # no upstream
181 | p="" ;;
182 | "0 0") # equal to upstream
183 | p=" u=" ;;
184 | "0 "*) # ahead of upstream
185 | p=" u+${count#0 }" ;;
186 | *" 0") # behind upstream
187 | p=" u-${count% 0}" ;;
188 | *) # diverged from upstream
189 | p=" u+${count#* }-${count% *}" ;;
190 | esac
191 | fi
192 |
193 | }
194 |
195 |
196 | # __git_ps1 accepts 0 or 1 arguments (i.e., format string)
197 | # returns text to add to bash PS1 prompt (includes branch name)
198 | __git_ps1 ()
199 | {
200 | local g="$(__gitdir)"
201 | if [ -n "$g" ]; then
202 | local r=""
203 | local b=""
204 | if [ -f "$g/rebase-merge/interactive" ]; then
205 | r="|REBASE-i"
206 | b="$(cat "$g/rebase-merge/head-name")"
207 | elif [ -d "$g/rebase-merge" ]; then
208 | r="|REBASE-m"
209 | b="$(cat "$g/rebase-merge/head-name")"
210 | else
211 | if [ -d "$g/rebase-apply" ]; then
212 | if [ -f "$g/rebase-apply/rebasing" ]; then
213 | r="|REBASE"
214 | elif [ -f "$g/rebase-apply/applying" ]; then
215 | r="|AM"
216 | else
217 | r="|AM/REBASE"
218 | fi
219 | elif [ -f "$g/MERGE_HEAD" ]; then
220 | r="|MERGING"
221 | elif [ -f "$g/CHERRY_PICK_HEAD" ]; then
222 | r="|CHERRY-PICKING"
223 | elif [ -f "$g/BISECT_LOG" ]; then
224 | r="|BISECTING"
225 | fi
226 |
227 | branch_name="$(git symbolic-ref HEAD 2>/dev/null)" || {
228 |
229 | b="$(
230 | case "${GIT_PS1_DESCRIBE_STYLE-}" in
231 | (contains)
232 | git describe --contains HEAD ;;
233 | (branch)
234 | git describe --contains --all HEAD ;;
235 | (describe)
236 | git describe HEAD ;;
237 | (* | default)
238 | git describe --tags --exact-match HEAD ;;
239 | esac 2>/dev/null)" ||
240 |
241 | b="$(cut -c1-7 "$g/HEAD" 2>/dev/null)..." ||
242 | b="unknown"
243 | b="($b)"
244 | }
245 | if [ "true" = "${GIT_PS1_SHOWBRANCH-}" ]; then
246 | if [ -n "${branch_name-}" ]; then
247 | b="$branch_name"
248 | fi
249 | fi
250 | fi
251 |
252 | local w=""
253 | local i=""
254 | local s=""
255 | local u=""
256 | local c=""
257 | local p=""
258 |
259 | if [ "true" = "$(git rev-parse --is-inside-git-dir 2>/dev/null)" ]; then
260 | if [ "true" = "$(git rev-parse --is-bare-repository 2>/dev/null)" ]; then
261 | c="BARE:"
262 | else
263 | b="GIT_DIR!"
264 | fi
265 | elif [ "true" = "$(git rev-parse --is-inside-work-tree 2>/dev/null)" ]; then
266 | if [ -n "${GIT_PS1_SHOWDIRTYSTATE-}" ]; then
267 | if [ "$(git config --bool bash.showDirtyState)" != "false" ]; then
268 | git diff --no-ext-diff --quiet --exit-code || w="*"
269 | if git rev-parse --quiet --verify HEAD >/dev/null; then
270 | git diff-index --cached --quiet HEAD -- || i="+"
271 | else
272 | i="#"
273 | fi
274 | fi
275 | fi
276 | if [ -n "${GIT_PS1_SHOWSTASHSTATE-}" ]; then
277 | git rev-parse --verify refs/stash >/dev/null 2>&1 && s="$"
278 | fi
279 |
280 | if [ -n "${GIT_PS1_SHOWUNTRACKEDFILES-}" ]; then
281 | if [ -n "$(git ls-files --others --exclude-standard)" ]; then
282 | u="?"
283 | fi
284 | fi
285 |
286 | if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
287 | __git_ps1_show_upstream
288 | fi
289 | fi
290 |
291 | local f="$w$i$s$u"
292 | printf -- "${1:- (%s)}" "$c${b##refs/heads/}${f:+ $f}$r$p"
293 | fi
294 | }
295 |
--------------------------------------------------------------------------------
/.bash_prompt.d/preferred_color.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | if [[ -z "$PREFERRED_COLOR" ]]; then
4 | if type -t python >/dev/null 2>&1; then
5 | # determine the color based on the user and hostname
6 | export PREFERRED_COLOR
7 | PREFERRED_COLOR="$(python -c 'import binascii,getpass,socket;id="%s@%s" % (getpass.getuser(), socket.gethostname(),);hsh=binascii.crc32(id.encode());idx=(hsh % (38-31)) + 31;print(idx)')"
8 | fi
9 | fi
10 |
--------------------------------------------------------------------------------
/.bash_prompt.d/project-prompt.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # USAGE: __transcend_root SPECIAL_DIR
4 | #
5 | # Transcends the current path, checking for the existence of the given "special" dir
6 | function __transcend_root() {
7 | local current=""
8 | local parent="$PWD"
9 |
10 | until [[ "$current" == "$parent" ]]; do
11 | if [[ -e "$parent/$1" ]]; then
12 | printf "%s" "$parent"
13 | return 0
14 | fi
15 |
16 | current="$parent"
17 | parent=$(dirname "$parent")
18 | done
19 | }
20 |
21 | # resolve symlinks to find the apparent project root
22 | #
23 | # USAGE: __project_resolve_symlinks PROJECT_ROOT [APPARENT_PWD]
24 | function __project_resolve_symlinks() {
25 | local previous=""
26 | local current="${2:-$PWD}"
27 |
28 | until [[ "$previous" == "$current" ]]; do
29 | if [[ "$current" -ef "$1" ]]; then
30 | printf "%s" "$current"
31 | return 0
32 | fi
33 |
34 | previous="$current"
35 | current=$(dirname "$current")
36 | done
37 |
38 | return 1
39 | }
40 |
41 | # determine the root of the project
42 | function project_root() {
43 | local result
44 |
45 | local gitroot
46 | gitroot="$(git rev-parse --show-toplevel 2>/dev/null)"
47 | if [[ -n "$gitroot" ]]; then
48 | result="$(__project_resolve_symlinks "$gitroot" "$PWD")"
49 | fi
50 |
51 | if [[ "$result" = "." ]]; then
52 | result="$PWD"
53 | fi
54 |
55 | if [[ -n "$result" ]]; then
56 | printf "%s" "$result"
57 | fi
58 | }
59 |
60 | function project_ps1() {
61 | if [[ -n "$PROJECT_NAME" ]]; then
62 | printf "%s" "[${PROJECT_NAME}]${PROJECT_PATH}"
63 | else
64 | if [[ "$PWD" == "$HOME" ]]; then
65 | printf "%s" "~"
66 | else
67 | printf "%s" "${PWD##*/}"
68 | fi
69 | fi
70 | }
71 |
72 | function _project_update_name() {
73 | PROJECT_ROOT="$(project_root)"
74 | if [[ -d "$PROJECT_ROOT" ]]; then
75 | PROJECT_NAME="$(basename "$PROJECT_ROOT")"
76 | PROJECT_PATH="${PWD##$PROJECT_ROOT}"
77 | else
78 | PROJECT_NAME=""
79 | PROJECT_PATH=""
80 | fi
81 | }
82 |
83 | if [[ ! "$CHDIR_COMMAND" == *"_project_update_name"* ]]; then
84 | CHDIR_COMMAND="_project_update_name${CHDIR_COMMAND:+;$CHDIR_COMMAND}"
85 | fi
86 |
--------------------------------------------------------------------------------
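As a worked example (the paths are hypothetical): with the current directory at ~/src/bashfiles/bin inside a git checkout rooted at ~/src/bashfiles, _project_update_name sets PROJECT_NAME=bashfiles and PROJECT_PATH=/bin, so project_ps1 prints:

    [bashfiles]/bin

Outside any repository it falls back to the basename of the current directory, or "~" when in $HOME.
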
/.bash_prompt.d/remote-prompt.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # show the user and hostname if we're running on a remote server
4 |
5 | # conditionally show the hostname if we're running in an ssh connection
6 |
7 | function __remote_host() {
8 | if is_remote; then
9 | echo "$USER@$HOSTNAME "
10 | fi
11 | }
12 |
--------------------------------------------------------------------------------
/.bashlib:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # If not running interactively, don't do anything
4 | case $- in
5 | *i*) ;;
6 | *) return;;
7 | esac
8 |
9 | # check the window size after each command and, if necessary,
10 | # update the values of LINES and COLUMNS.
11 | shopt -s checkwinsize
12 |
13 | # typo correction for bash
14 | shopt -s cdspell
15 | if [[ ! "${BASH_VERSION:0:3}" < "4.2" ]]; then
16 | shopt -s dirspell
17 | fi
18 |
19 | if [[ -f "$HOME"/.bash_features ]]; then
20 | . "$HOME"/.bash_features
21 | fi
22 |
23 | # Alias definitions
24 | if [[ -f "$HOME"/.bash_aliases ]]; then
25 | . "$HOME"/.bash_aliases
26 | fi
27 |
28 | ## include the bash libraries ##
29 | ################################
30 | for lib in "$HOME"/.bash_lib.d/*; do
31 | . "$lib"
32 | done
33 |
34 | ## set up bash completion ##
35 | ############################
36 | # enable programmable completion features (you don't need to enable
37 | # this, if it's already enabled in /etc/bash.bashrc and /etc/profile
38 | # sources /etc/bash.bashrc).
39 | bind 'set completion-ignore-case on'
40 |
41 | for prog in "$HOME"/.bash_completion.d/*; do
42 | . "$prog"
43 | done
44 |
45 | if [ -f "$HOME"/.bash_prompt ]; then
46 | . "$HOME"/.bash_prompt
47 | fi
48 |
49 | # this loads a well known bashrc based on the REMOTE_USER env variable
50 | # the recommended use of this is to serve as a dispatching script on shared servers
51 | #
52 | # the expected location is .bash_users.d/$REMOTE_USER.bash
53 | #
54 | # To set the REMOTE_USER envvar, you need to turn on PermitUserEnvironment in your sshd,
55 | # and add `environment="REMOTE_USER=steven"` before the related key in ~/.ssh/authorized_keys
56 | #
57 | # PuTTY users can set the REMOTE_USER envvar via the Connection > Data settings,
58 | # but sshd must have the "AllowEnv REMOTE_USER" line added to sshd_config.
59 |
60 | if [[ -n "$REMOTE_USER" ]]; then
61 | if [[ -f "$HOME/.bash_users.d/$REMOTE_USER.bash" ]]; then
62 | . "$HOME/.bash_users.d/$REMOTE_USER.bash"
63 | fi
64 | fi
65 |
66 | if [[ -n "$TMUX" ]]; then
67 | if [[ -f "$HOME/.tmux_profile" ]]; then
68 | . "$HOME/.tmux_profile"
69 | fi
70 | fi
71 |
--------------------------------------------------------------------------------
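To make the `REMOTE_USER` dispatching above concrete, here is a minimal sketch of the three pieces involved. The username `steven` comes from the comment in `.bashlib`; the key material, editor, and aliases are placeholders.

```
# /etc/ssh/sshd_config on the shared server:
#   PermitUserEnvironment yes

# ~/.ssh/authorized_keys, tagging a key with its owner (key shortened):
#   environment="REMOTE_USER=steven" ssh-ed25519 AAAA... steven@laptop

# ~/.bash_users.d/steven.bash, sourced automatically by .bashlib:
export EDITOR=vim
alias gs='git status'
```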
/.bashrc_macos:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function notify() {
4 | osascript -e "display notification \"$*\" with title \"Terminal\""
5 | }
6 |
7 | alias lstat='stat -L'
8 | alias trim_trailing_spaces='sed -i "" -E -e '"'"'s/[[:space:]]*$//'"'"''
9 |
10 | if [[ -n "$TERMINUS_SUBLIME" ]]; then
11 | bind '"\e[1;3C": forward-word'
12 | bind '"\e[1;3D": backward-word'
13 | fi
14 |
--------------------------------------------------------------------------------
/.bashrc_ubuntu:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Notification helper for long running commands. Use like so:
4 | #   sleep 10; notify done
5 | function notify() {
6 | notify-send --urgency=low -i "$([ $? = 0 ] && echo terminal || echo error)" "$@"
7 | }
8 |
9 |
10 | if [[ "$(lsb_release -s -c)" == "xenial" ]]; then
11 | alias run_upgrades='sudo apt update && sudo apt dist-upgrade -y --autoremove'
12 | else
13 | alias run_upgrades='sudo apt-get update && sudo apt-get dist-upgrade -y --autoremove'
14 | fi
15 |
16 | if [[ -x /usr/bin/lesspipe ]]; then
17 | export LESS="$LESS -R"
18 | eval "$(SHELL=/bin/sh lesspipe)"
19 | fi
20 |
21 | function topswap() {
22 | for file in /proc/*/status ; do awk '/VmSwap|^Pid/{printf $2 " " $3}END{ print ""}' "$file"; done | sort -k 2 -n -r | less
23 | }
24 |
25 | function sizeof() {
26 | local file="$1"
27 | du -sh "$file" | sort -r -h | head -10
28 | }
29 |
30 | function clipboard() {
31 | # determine which direction we're flowing: in or out
32 | if [[ -t 0 ]]; then
33 | xclip -out -selection clipboard
34 | else
35 | xclip -selection clipboard
36 | fi
37 | }
38 |
--------------------------------------------------------------------------------
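A quick usage sketch for the `clipboard` helper above: the tty test on stdin decides whether it copies or pastes (requires `xclip` and an X session).

```
# stdin is a pipe, so the text is copied into the clipboard
echo "some text" | clipboard

# stdin is the terminal, so the clipboard contents are written to stdout
clipboard > pasted.txt
```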
/.config/git/attributes:
--------------------------------------------------------------------------------
1 | *.py diff=python
2 | *.pyx diff=python gitlab-language=python
3 |
--------------------------------------------------------------------------------
/.config/git/ignore:
--------------------------------------------------------------------------------
1 | # Compiled source #
2 | ###################
3 | *.class
4 | *.dll
5 | *.exe
6 | *.o
7 | *.so
8 | *.dylib
9 | *.gem
10 | *.pyc
11 |
12 | # VHDL #
13 | ########
14 | work-obj93.cf
15 | *.exe.sim/
16 | *.wdb
17 | *.prj
18 | _xmsgs/
19 | isim/
20 | xilinxsim.ini
21 | isim.cmd
22 | iseconfig/
23 |
24 | # Packages #
25 | ############
26 | # it's better to unpack these files and commit the raw source
27 | # git has its own built in compression methods
28 | *.7z
29 | *.dmg
30 | *.gz
31 | *.iso
32 | *.rar
33 | *.tar
34 | *.zip
35 |
36 | # Logs and databases #
37 | ######################
38 | *.log
39 | *.sql
40 | *.sqlite
41 |
42 | # OS generated files #
43 | ######################
44 | .DS_Store
45 | ._*
46 | .Spotlight-V100
47 | .Trashes
48 | ehthumbs.db
49 | Thumbs.db
50 |
51 | # Tooling generated files #
52 | ###########################
53 | .sonar/
54 | .yardoc/
55 | .mypy_cache/
56 | .stfolder/
57 | .ipynb_checkpoints/
58 | .project/
59 | .pydevproject/
60 | .ropeproject/
61 | .stignore
62 | .stignore-patterns.txt
63 |
64 | # Temporary files #
65 | ###################
66 | *~
67 | *.pid
68 | .ipynb_checkpoints/
69 | .mypy_cache/
70 | .bundle/
71 | .stfolder/
72 | *.orig
73 | *.rej
74 | *.vcd
75 | .bundle/
76 |
77 | # My personal typical trash #
78 | #############################
79 | sandbox.*
80 | .issues/
81 | .docs/
82 |
--------------------------------------------------------------------------------
/.config/starship.toml:
--------------------------------------------------------------------------------
1 | [time]
2 | disabled = false
3 | format = "[$time]($style) "
4 | time_format = "(%F %T)"
5 |
6 | [status]
7 | disabled = false
8 | format = "[\\[$status\\]]($style) "
9 |
10 | [terraform]
11 | format = "via [$symbol$version]($style) "
12 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .weechat/logs
2 | .weechat/sec.conf
3 | .profile
4 | .bash_profile
5 | .bashrc
6 | .bash_features
7 | .vim/.netrwhist
8 | .config-real
9 |
--------------------------------------------------------------------------------
/.hammerspoon/Spoons/ReloadConfiguration.spoon/docs.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "Command": [],
4 | "Constant": [],
5 | "Constructor": [],
6 | "Deprecated": [],
7 | "Field": [],
8 | "Function": [],
9 | "Method": [
10 | {
11 | "def": "ReloadConfiguration:bindHotkeys(mapping)",
12 | "desc": "Binds hotkeys for ReloadConfiguration",
13 | "doc": "Binds hotkeys for ReloadConfiguration\n\nParameters:\n * mapping - A table containing hotkey modifier/key details for the following items:\n * reloadConfiguration - This will cause the configuration to be reloaded",
14 | "name": "bindHotkeys",
15 | "parameters": [
16 | " * mapping - A table containing hotkey modifier/key details for the following items:",
17 | " * reloadConfiguration - This will cause the configuration to be reloaded"
18 | ],
19 | "signature": "ReloadConfiguration:bindHotkeys(mapping)",
20 | "stripped_doc": "",
21 | "type": "Method"
22 | },
23 | {
24 | "def": "ReloadConfiguration:start()",
25 | "desc": "Start ReloadConfiguration",
26 | "doc": "Start ReloadConfiguration\n\nParameters:\n * None",
27 | "name": "start",
28 | "parameters": [
29 | " * None"
30 | ],
31 | "signature": "ReloadConfiguration:start()",
32 | "stripped_doc": "",
33 | "type": "Method"
34 | }
35 | ],
36 | "Variable": [
37 | {
38 | "def": "ReloadConfiguration.watch_paths",
39 | "desc": "List of directories to watch for changes, defaults to hs.configdir",
40 | "doc": "List of directories to watch for changes, defaults to hs.configdir",
41 | "name": "watch_paths",
42 | "signature": "ReloadConfiguration.watch_paths",
43 | "stripped_doc": "",
44 | "type": "Variable"
45 | }
46 | ],
47 | "desc": "Adds a hotkey to reload the hammerspoon configuration, and a pathwatcher to automatically reload on changes.",
48 | "doc": "Adds a hotkey to reload the hammerspoon configuration, and a pathwatcher to automatically reload on changes.\n\nDownload: [https://github.com/Hammerspoon/Spoons/raw/master/Spoons/ReloadConfiguration.spoon.zip](https://github.com/Hammerspoon/Spoons/raw/master/Spoons/ReloadConfiguration.spoon.zip)",
49 | "items": [
50 | {
51 | "def": "ReloadConfiguration:bindHotkeys(mapping)",
52 | "desc": "Binds hotkeys for ReloadConfiguration",
53 | "doc": "Binds hotkeys for ReloadConfiguration\n\nParameters:\n * mapping - A table containing hotkey modifier/key details for the following items:\n * reloadConfiguration - This will cause the configuration to be reloaded",
54 | "name": "bindHotkeys",
55 | "parameters": [
56 | " * mapping - A table containing hotkey modifier/key details for the following items:",
57 | " * reloadConfiguration - This will cause the configuration to be reloaded"
58 | ],
59 | "signature": "ReloadConfiguration:bindHotkeys(mapping)",
60 | "stripped_doc": "",
61 | "type": "Method"
62 | },
63 | {
64 | "def": "ReloadConfiguration:start()",
65 | "desc": "Start ReloadConfiguration",
66 | "doc": "Start ReloadConfiguration\n\nParameters:\n * None",
67 | "name": "start",
68 | "parameters": [
69 | " * None"
70 | ],
71 | "signature": "ReloadConfiguration:start()",
72 | "stripped_doc": "",
73 | "type": "Method"
74 | },
75 | {
76 | "def": "ReloadConfiguration.watch_paths",
77 | "desc": "List of directories to watch for changes, defaults to hs.configdir",
78 | "doc": "List of directories to watch for changes, defaults to hs.configdir",
79 | "name": "watch_paths",
80 | "signature": "ReloadConfiguration.watch_paths",
81 | "stripped_doc": "",
82 | "type": "Variable"
83 | }
84 | ],
85 | "name": "ReloadConfiguration",
86 | "stripped_doc": "\nDownload: [https://github.com/Hammerspoon/Spoons/raw/master/Spoons/ReloadConfiguration.spoon.zip](https://github.com/Hammerspoon/Spoons/raw/master/Spoons/ReloadConfiguration.spoon.zip)",
87 | "submodules": [],
88 | "type": "Module"
89 | }
90 | ]
--------------------------------------------------------------------------------
/.hammerspoon/Spoons/ReloadConfiguration.spoon/init.lua:
--------------------------------------------------------------------------------
1 | --- === ReloadConfiguration ===
2 | ---
3 | --- Adds a hotkey to reload the hammerspoon configuration, and a pathwatcher to automatically reload on changes.
4 | ---
5 | --- Download: [https://github.com/Hammerspoon/Spoons/raw/master/Spoons/ReloadConfiguration.spoon.zip](https://github.com/Hammerspoon/Spoons/raw/master/Spoons/ReloadConfiguration.spoon.zip)
6 |
7 | local obj = {}
8 | obj.__index = obj
9 |
10 | -- Metadata
11 | obj.name = "ReloadConfiguration"
12 | obj.version = "1.0"
13 | obj.author = "Jon Lorusso "
14 | obj.homepage = "https://github.com/Hammerspoon/Spoons"
15 | obj.license = "MIT - https://opensource.org/licenses/MIT"
16 |
17 |
18 | --- ReloadConfiguration.watch_paths
19 | --- Variable
20 | --- List of directories to watch for changes, defaults to hs.configdir
21 | obj.watch_paths = { hs.configdir }
22 |
23 | --- ReloadConfiguration:bindHotkeys(mapping)
24 | --- Method
25 | --- Binds hotkeys for ReloadConfiguration
26 | ---
27 | --- Parameters:
28 | --- * mapping - A table containing hotkey modifier/key details for the following items:
29 | --- * reloadConfiguration - This will cause the configuration to be reloaded
30 | function obj:bindHotkeys(mapping)
31 | local def = { reloadConfiguration = hs.fnutils.partial(hs.reload, self) }
32 | hs.spoons.bindHotkeysToSpec(def, mapping)
33 | end
34 |
35 | --- ReloadConfiguration:start()
36 | --- Method
37 | --- Start ReloadConfiguration
38 | ---
39 | --- Parameters:
40 | --- * None
41 | function obj:start()
42 | self.watchers = {}
43 | for _,dir in pairs(self.watch_paths) do
44 | self.watchers[dir] = hs.pathwatcher.new(dir, hs.reload):start()
45 | end
46 | return self
47 | end
48 |
49 | return obj
50 |
--------------------------------------------------------------------------------
/.hammerspoon/init.lua:
--------------------------------------------------------------------------------
1 |
2 | -- Configuration hot reload
3 | hs.loadSpoon("ReloadConfiguration")
4 | spoon.ReloadConfiguration:start()
5 |
6 | -- just keeping this here as an example. do not use!!!
7 | hs.hotkey.bind({"cmd", "alt", "ctrl"}, "W", function()
8 | hs.notify.new({title="Hammerspoon", informativeText="Hello World"}):send()
9 | end)
10 |
11 |
--------------------------------------------------------------------------------
/.inputrc:
--------------------------------------------------------------------------------
1 | # site wide configs usually include some goodies
2 | $include /etc/inputrc
3 |
4 | # Turn on a bunch of completion features (taken from https://www.topbug.net/blog/2017/07/31/inputrc-for-humans)
5 | set colored-stats On
6 | set completion-ignore-case On
7 | set completion-prefix-display-length 3
8 | set mark-symlinked-directories On
9 | set show-all-if-ambiguous On
10 | set show-all-if-unmodified On
11 | set visible-stats On
12 |
13 | # iTerm2 sends nonstandard modified arrow keys, so support its defaults
14 | "\e[1;3D": backward-word
15 | "\e[1;3C": forward-word
16 | "\e[1;5D": backward-word
17 | "\e[1;5C": forward-word
18 | "\e[1;9D": backward-word
19 | "\e[1;9C": forward-word
20 | "\e[C": forward-char
21 | "\e[D": backward-char
22 |
23 | # Search through history
24 | "\e[A": history-search-backward
25 | "\e[B": history-search-forward
26 |
--------------------------------------------------------------------------------
/.ipython/ipython_config.py:
--------------------------------------------------------------------------------
1 | try:
2 | c
3 | except NameError:
4 | c = get_config()
5 |
6 | c.InteractiveShellApp.setdefault('exec_lines', [])
7 | c.InteractiveShellApp.exec_lines.extend([
8 | 'import sys, time, dis, os',
9 | ])
10 | c.TerminalInteractiveShell.confirm_exit = False
11 |
12 | import IPython
13 | if IPython.version_info > (4,0,0):
14 | profile_dir = IPython.paths.locate_profile()
15 | else: # should apply to at least IPython 0.13.1-1.2.1
16 | profile_dir = IPython.utils.path.locate_profile()
17 |
18 | c.InteractiveShellApp.setdefault('exec_files', [])
19 | c.InteractiveShellApp.exec_files.extend([
20 | profile_dir + '/ipython_helpers.py'
21 | ])
22 |
--------------------------------------------------------------------------------
/.ipython/ipython_helpers.py:
--------------------------------------------------------------------------------
1 |
2 | def stacktrace(thread):
3 | import threading
4 | if isinstance(thread, threading.Thread):
5 | tid = thread.ident
6 |     elif isinstance(thread, int):
7 | tid = thread
8 | else:
9 | return
10 |
11 | import sys
12 | frame = sys._current_frames()[tid]
13 | if not frame:
14 | return
15 |
16 | import traceback
17 | traceback.print_stack(frame)
18 |
19 |
20 | def common_apis(*args):
21 | """
22 | Find a common subset of public attributes between all the given args
23 | """
24 |     return set.intersection(*[set(k for k in clazz.__dict__.keys() if k[0] != '_') for clazz in args])
25 |
26 |
27 | def elapsed(f):
28 | """
29 | print the elapsed time while running the given callable
30 | """
31 | from datetime import datetime
32 | start = datetime.utcnow()
33 | try:
34 | f()
35 | finally:
36 | print('elapsed: %f' % (datetime.utcnow() - start).total_seconds())
37 |
38 |
39 | def histtime(f, time=5.0):
40 | """
41 | Run the given callable as many times as possible in the allotted time, and print statistics on how long it took to run
42 | """
43 | import pandas
44 | from datetime import datetime,timedelta
45 | start = datetime.utcnow()
46 | finish_by = start + timedelta(seconds = time)
47 | times = []
48 | while True:
49 | if start > finish_by:
50 | break
51 | try:
52 | f()
53 | except:
54 | pass
55 | prev = start
56 | start = datetime.utcnow()
57 | times.append((start - prev).total_seconds())
58 | print(pandas.Series(times).describe(percentiles = [ 0.25, 0.5, 0.75, 0.9, 0.95, 0.99, 0.999 ]))
59 |
60 |
61 | def get_exception(f):
62 | """
63 | Easy way to get an exception thrown by the given callable
64 | """
65 | try:
66 | return f()
67 | except Exception as e:
68 | return e
69 |
--------------------------------------------------------------------------------
/.irbrc:
--------------------------------------------------------------------------------
1 | require 'irb/completion'
2 | require 'pp'
3 | IRB.conf[:AUTO_INDENT]=true
4 |
5 | class Object
6 | def interesting_methods
7 | self.methods - Object.new.methods
8 | end
9 | end
10 |
11 | class Module
12 | def interesting_methods
13 | self.methods - Math.methods
14 | end
15 | end
16 |
17 | class Class
18 | def interesting_methods
19 | self.methods - Class.methods
20 | end
21 | end
22 |
23 | module Enumerable
24 | def progress
25 | return to_enum(__method__) unless block_given?
26 | progress = 0
27 | out_of = self.size
28 | return self.each do |item|
29 | progress += 1
30 | if STDOUT.isatty
31 | print "\r#{progress} out of #{out_of}"
32 | print "\n" if progress == out_of
33 | end
34 | yield item
35 | end
36 | end
37 | end
38 |
39 | class Integer
40 | def magic_number
41 | return self.to_s(16).chars.each_slice(2).to_a.map(&:join).map{|d|d.to_i(16)}.map(&:chr).join
42 | end
43 | end
44 |
45 | class String
46 | def magic_number
47 | return self.chars.map(&:ord).map{|c|c.to_s(16)}.join.to_i(16)
48 | end
49 | end
50 |
51 | class Hash
52 | def to_schema
53 | self.reduce({}) do |result, item|
54 | result[item[0]] = item[1].class
55 | result[item[0]] = item[1].to_schema if item[1].respond_to? :to_schema
56 | result
57 | end
58 | end
59 | end
60 |
61 | module Enumerable
62 | def to_schema
63 | self.map{|item| item.respond_to?(:to_schema) ? item.to_schema : item.class}.uniq
64 | end
65 | end
66 |
--------------------------------------------------------------------------------
/.lessfilter:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # portions based on https://github.com/nojhan/dotfiles/blob/master/lessfilter.sh
4 |
5 | case "$1" in
6 | *.rb|Rakefile|*.rake|*.gemspec|Gemfile)
7 | type -t pygmentize >/dev/null 2>&1 && pygmentize -f 256 -l ruby "$1"
8 | ;;
9 | *.awk|*.groff|*.java|*.js|*.m4|*.php|*.pl|*.pm|*.pod|*.sh|*.ad[asb]|*.asm|*.inc|*.[ch]|*.[ch]pp|*.[ch]xx|*.cc|*.hh|*.lsp|*.l|*.pas|*.p|*.xml|*.xps|*.xsl|*.axp|*.ppd|*.pov|*.diff|*.patch|*.py|*.sql|*.ebuild|*.eclass)
10 | type -t pygmentize >/dev/null 2>&1 && pygmentize -f 256 "$1"
11 | ;;
12 | .bashrc|.bash_aliases|.bash_environment)
13 | type -t pygmentize >/dev/null 2>&1 && pygmentize -f 256 -l sh "$1"
14 | ;;
15 | *)
16 | # fall back on file
17 |         case "$(file -L -b -i "$1" | cut -d ';' -f 1)" in
18 | text/x-shellscript)
19 | pygmentize -f 256 -l sh "$1"
20 | ;;
21 | *)
22 | exit 1
23 | ;;
24 | esac
25 | ;;
26 | esac
27 |
28 | exit 0
29 |
--------------------------------------------------------------------------------
/.pryrc:
--------------------------------------------------------------------------------
1 | class Object
2 | def interesting_methods
3 | self.methods - Object.new.methods
4 | end
5 | end
6 |
7 | class Module
8 | def interesting_methods
9 | self.methods - Math.methods
10 | end
11 | end
12 |
13 | class Class
14 | def interesting_methods
15 | self.methods - Class.methods
16 | end
17 | end
18 |
19 | module Enumerable
20 | def progress
21 | return to_enum(__method__) unless block_given?
22 | progress = 0
23 | out_of = self.size
24 | return self.each do |item|
25 | progress += 1
26 | if STDOUT.isatty
27 | print "\r#{progress} out of #{out_of}"
28 | print "\n" if progress == out_of
29 | end
30 | yield item
31 | end
32 | end
33 | end
34 |
--------------------------------------------------------------------------------
/.psqlrc:
--------------------------------------------------------------------------------
1 | \set COMP_KEYWORD_CASE upper
2 | \x auto
3 | \pset null ø
4 |
--------------------------------------------------------------------------------
/.tmux.conf:
--------------------------------------------------------------------------------
1 | # Version-specific commands [grumble, grumble]
2 | # See: https://github.com/tmux/tmux/blob/master/CHANGES
3 | run-shell "tmux setenv -g TMUX_VERSION $(tmux -V | cut -c 6-)"
4 |
5 | # List of plugins
6 | set -g @plugin 'tmux-plugins/tpm'
7 | set -g @plugin 'tmux-plugins/tmux-sensible'
8 | set -g @plugin 'tmux-plugins/tmux-yank'
9 |
10 | if-shell -b '[ "$(printf "%s\n%s" "$TMUX_VERSION" "2.1" | sort -n | head -n 1)" != "2.1" ]' "\
11 | set -g mouse-select-pane on; set -g mode-mouse on; \
12 | set -g mouse-resize-pane on; set -g mouse-select-window on; \
13 | "
14 |
15 | # In version 2.1 "mouse" replaced the previous 4 mouse options
16 | if-shell -b '[ "$(printf "%s\n%s" "$TMUX_VERSION" "2.1" | sort -n | head -n 1)" = "2.1" ]' "\
17 | set -g mouse on; \
18 | set -g @plugin 'nhdaly/tmux-better-mouse-mode' ; \
19 | set -g @emulate-scroll-for-no-mouse-alternate-buffer on; \
20 | "
21 |
22 | # UTF8 is autodetected in 2.2 onwards, but errors if explicitly set
23 | if-shell -b '[ "$(printf "%s\n%s" "$TMUX_VERSION" "2.2" | sort -n | head -n 1)" != "2.2" ]' "\
24 | set -g utf8 on; set -g status-utf8 on; set -g mouse-utf8 on; \
25 | "
26 |
27 | bind-key -n C-PPage previous-window
28 | bind-key -n C-NPage next-window
29 | bind-key -n C-T new-window
30 | bind-key r source-file ~/.tmux.conf \; display 'Reloaded Configuration'
31 | bind-key C-i rotate-window -D
32 |
33 | set-window-option -g xterm-keys on
34 | if-shell 'tmux show-options -g update-environment | grep -v -q \\bDISPLAY\\b' "\
35 | set-option -ga update-environment ' DISPLAY'; \
36 | "
37 | if-shell 'tmux show-options -g update-environment | grep -v -q \\bREMOTE_USER\\b' "\
38 | set-option -ga update-environment ' REMOTE_USER'; \
39 | "
40 |
41 | # I use around 32 panes, and ubuntu's terminal at 1920x1080 maximized gives around 200 columns
42 | # I want to limit the total memory allocated for scrollback to no more than 512M
43 | # 512M total / 32 pane = 16M per pane
44 | # 16M per pane / 256B per line = 64K lines
45 | set-option -g history-limit 60000
46 |
47 | # set window title
48 | set-option -g set-titles on
49 | set-option -g set-titles-string '[#S:#I #H]'
50 | # number windows from 1 (easier to switch on keyboard)
51 | set-option -g base-index 1
52 | set-window-option -g pane-base-index 1
53 |
54 | # automatically install TPM on new machines
55 | if "test ! -d ~/.tmux/plugins/tpm" \
56 | "run 'git clone https://github.com/tmux-plugins/tpm ~/.tmux/plugins/tpm && ~/.tmux/plugins/tpm/bin/install_plugins'"
57 | run '~/.tmux/plugins/tpm/tpm'
58 |
--------------------------------------------------------------------------------
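The `history-limit` comment above works out a per-pane line budget; a small sketch of the same arithmetic, using the comment's own estimates (512M budget, ~32 panes, ~256 bytes per line):

```
budget_bytes=$((512 * 1024 * 1024))   # total scrollback budget
panes=32                              # typical number of panes
bytes_per_line=256                    # rough cost of one scrollback line
echo $((budget_bytes / panes / bytes_per_line))   # 65536, rounded down to 60000 above
```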
/.tmux_profile:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This file is sourced in interactive sessions launched inside tmux
4 |
5 | export PROMPT_COMMAND="${PROMPT_COMMAND:+$PROMPT_COMMAND; }toggle_remote"
6 |
7 |
--------------------------------------------------------------------------------
/.vim/ftplugin/python.vim:
--------------------------------------------------------------------------------
1 | setlocal tabstop=4
2 | setlocal softtabstop=4
3 | setlocal shiftwidth=4
4 | setlocal smarttab
5 | setlocal expandtab
6 | setlocal autoindent
7 |
--------------------------------------------------------------------------------
/.vim/vimrc:
--------------------------------------------------------------------------------
1 | set background=dark
2 |
3 | if has("autocmd")
4 | filetype plugin indent on
5 | endif
6 |
7 | set showmatch
8 | set showcmd
9 |
10 | syntax enable
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # What is this?
2 |
3 | These are my personal bashfiles.
4 | There are many like them, but these are mine.
5 |
6 | Strictly speaking, there's a lot more than just bash here, but the rest is shared configuration files.
7 |
8 | ## Bash-related features
9 |
10 | * Prompt
11 | * Git integration with the prompt
12 | * Infinite bash history
13 | * Colors!
14 | * Automatic display of hostname if remotely connected
15 | * Others
16 | * Convenience aliases
17 |
18 | ## Other program settings
19 |
20 | * git
21 | * gdb
22 | * ipython
23 | * irb
24 | * tmux
25 | * psql
26 | * vim (not great work on my part, I don't use it much)
27 | * less
28 |
29 | ## Common utilities
30 |
31 | * SSH - ssh-ca, ssh-acme, ssh-manager
32 | * various helpers
33 |
34 | # Installation
35 |
36 | Either git clone and run `install_bashfiles.bash`, or:
37 |
38 | ```
39 | curl https://karas.io/bashfiles | bash
40 | ```
41 |
42 | which does exactly the same thing.
43 |
--------------------------------------------------------------------------------
/bin/bash_historian:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function _search_history() {
4 | if [[ $# -lt 1 ]]; then
5 | echo "USAGE: $(basename "$0") search PATTERN"
6 | return 2
7 | fi
8 |
9 | history -n
10 | # TODO: figure out how to get history to only emit commands without entry numbers
11 |     HISTTIMEFORMAT="" history | awk '{$1=""; sub(/^[ \t]+/, ""); print $0}' | grep -e "$@" | sort | uniq -c | sort -n -r | head
12 | }
13 |
14 | function show_usage() {
15 | local prog="$(basename "$0")"
16 | cat <<-HELPMESSAGE
17 | $prog search PATTERN # search your bash history for the given pattern
18 | $prog /PATTERN # shorthand for search
19 | HELPMESSAGE
20 | if [[ "$1" == "-v" || "$1" == "--verbose" ]]; then
21 | cat <<-VERBOSEHELP
22 |
23 | This script is a pure bash implementation of historian, including search and frequency analysis
24 | VERBOSEHELP
25 | fi
26 | }
27 |
28 | function main() {
29 | local subcommand="$1"
30 | shift
31 | case "$subcommand" in
32 | /*)
33 | _search_history "${subcommand:1}" "$@"
34 | exit $?
35 | ;;
36 | search)
37 | _search_history "$@"
38 | exit $?
39 | ;;
40 | -?|-h|--help|help|"")
41 | show_usage "$@"
42 | exit $?
43 | ;;
44 | *)
45 | echo "Unknown command: $subcommand"
46 | echo ""
47 | show_usage
48 | exit 2
49 | ;;
50 | esac
51 | }
52 |
53 | if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
54 | main "$@"
55 | fi
56 |
57 |
--------------------------------------------------------------------------------
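Assuming the script is on `PATH` as `bash_historian`, typical invocations look like this; the output is the most frequent matching history entries with their counts.

```
# frequency-sorted history entries mentioning "git"
bash_historian search git

# shorthand for the same search
bash_historian /git

# verbose help
bash_historian help --verbose
```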
/bin/cidr:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import ipaddress
4 | import argparse
5 |
6 |
7 | def cidr_info(address):
8 | return dict(
9 | cidr=address,
10 | netname=address.network_address,
11 | broadcast=address.broadcast_address,
12 | num_hosts=address.num_addresses,
13 | )
14 |
15 |
16 | def render_cidr_report(info):
17 | return '\n'.join(l.strip() for l in """
18 | CIDR: {cidr}
19 | Network Name: {netname}
20 | Broadcast Address: {broadcast}
21 | Number of hosts: {num_hosts}
22 | """.split('\n')).format(**info).strip()
23 |
24 |
25 | def main():
26 | parser = argparse.ArgumentParser(description='Gives information about a network')
27 | parser.add_argument('network', help='Either an IPv4 address or network')
28 | args = parser.parse_args()
29 |
30 | info = cidr_info(ipaddress.ip_network(args.network, strict=False))
31 | print(render_cidr_report(info))
32 |
33 |
34 | if __name__ == '__main__':
35 | main()
36 |
--------------------------------------------------------------------------------
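A usage sketch for `bin/cidr` (output shown as comments; the values follow from Python's `ipaddress` module for a /24):

```
cidr 192.168.1.0/24
# CIDR: 192.168.1.0/24
# Network Name: 192.168.1.0
# Broadcast Address: 192.168.1.255
# Number of hosts: 256
```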
/bin/describe:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys
4 | import pandas
5 |
6 | def render_for_data(data, template):
7 | series = pandas.Series(data)
8 | description = series.describe(percentiles = [ 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99 ])
9 | template_args = description.to_dict()
10 | template_args['full'] = str(description)
11 | template_args['short'] = str(series.describe(percentiles = [0.5, 0.75, 0.9, 0.95, 0.99]))
12 | template_args['sum'] = series.sum()
13 | return template.format(**template_args)
14 |
15 | def main():
16 | if len(sys.argv) == 1:
17 | template = '{short}'
18 | else:
19 | template = ' '.join(sys.argv[1:])
20 |
21 | # fail fast if the template string isn't a good one
22 | _ = render_for_data([1, 2, 3.4], template)
23 | print(render_for_data([float(line) for line in [line.strip() for line in sys.stdin.readlines() if line.strip()]], template))
24 |
25 | if __name__ == '__main__':
26 | sys.exit(main() or 0)
27 |
--------------------------------------------------------------------------------
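A usage sketch for `bin/describe` (it needs `pandas` installed); any field returned by `Series.describe()` can appear in the template, plus `sum`, `full`, and `short`.

```
# default template: a percentile summary of the numbers on stdin
seq 1 100 | describe

# custom template built from named fields
seq 1 100 | describe '{mean} {sum}'   # prints: 50.5 5050.0
```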
/bin/field:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import re
4 | import sys
5 | import argparse
6 |
7 |
8 | def match(args):
9 | for line in sys.stdin:
10 | matches = re.findall(args.pattern, line)
11 | if matches:
12 | print(args.delimiter.join(matches))
13 |
14 |
15 | def field(args):
16 | for line in sys.stdin:
17 | tokens = line.strip().split(args.delimiter)
18 | field = tokens[args.field:args.field + 1]
19 | if field:
20 |             print(field[0])
21 |
22 |
23 | def before(args):
24 | for line in sys.stdin:
25 | tokens = line.strip().split(args.delimiter)
26 | previous_token = None
27 | for token in tokens:
28 | if token == args.token:
29 | if previous_token is not None:
30 | print(previous_token)
31 | break
32 | previous_token = token
33 |
34 |
35 | def after(args):
36 | for line in sys.stdin:
37 | tokens = line.strip().split(args.delimiter)
38 | print_token = False
39 | for token in tokens:
40 | if token == args.token:
41 | print_token = True
42 | continue
43 | if print_token:
44 | print(token)
45 | break
46 |
47 |
48 | def parse_args():
49 | parser = argparse.ArgumentParser(description='Extract fields from stdin', usage='COMMAND | %(prog)s MODE [OPTIONS]')
50 | subparsers = parser.add_subparsers(help='the mode of operation')
51 |
52 | after_parser = subparsers.add_parser('after', help='extract the field after a token')
53 | after_parser.add_argument('token', help='extract the field immediately following this token')
54 | after_parser.add_argument(
55 | '-d', '--delimiter', dest='delimiter', default=' ',
56 | help='delimiter to use between tokens',
57 | )
58 | after_parser.set_defaults(func=after)
59 |
60 | before_parser = subparsers.add_parser('before', help='extract the field before a token')
61 | before_parser.add_argument('token', help='extract the field immediately preceding this token')
62 | before_parser.add_argument(
63 | '-d', '--delimiter', dest='delimiter', default=' ',
64 | help='delimiter to use between tokens',
65 | )
66 | before_parser.set_defaults(func=before)
67 |
68 | field_parser = subparsers.add_parser('field', help='extract the field by number')
69 |     field_parser.add_argument('field', type=int, help='the field to extract')
70 | field_parser.add_argument(
71 | '-d', '--delimiter', dest='delimiter', default=' ',
72 | help='delimiter to use between fields',
73 | )
74 | field_parser.set_defaults(func=field)
75 |
76 | match_parser = subparsers.add_parser('match', help='extract fields that match the given pattern')
77 | match_parser.add_argument('pattern', help='the pattern to match against')
78 | match_parser.add_argument(
79 | '-d', '--delimiter', dest='delimiter', default=' ',
80 | help='delimiter to use between fields',
81 | )
82 | match_parser.set_defaults(func=match)
83 |
84 | args = parser.parse_args()
85 | if sys.stdin.isatty():
86 | parser.print_usage()
87 | return None
88 |
89 | return args
90 |
91 |
92 | def main():
93 | args = parse_args()
94 | if args is None:
95 | sys.exit(1)
96 |
97 | args.func(args)
98 |
99 |
100 | if __name__ == '__main__':
101 | main()
102 |
--------------------------------------------------------------------------------
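A few usage sketches for `bin/field`, one per subcommand; the sample input line is made up.

```
echo "error code 42 at line 7" | field after code        # prints: 42
echo "error code 42 at line 7" | field before at         # prints: 42
echo "error code 42 at line 7" | field field 2           # prints: 42 (0-based index into the tokens)
echo "error code 42 at line 7" | field match '[0-9]+'    # prints: 42 7
```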
/bin/fs_advisory_lock:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # We use creating a hardlink as the locking primitive here.
4 | # flock is a great primitive for local processes, but this advisory lock is intended to be used as a distributed lock
5 |
6 | function _lock() {
7 | local lockfile="$1"
8 | local lease_term="${2:-300}" # 5 minute default lease
9 | shift 2
10 | while ! _attempt_lock "$lockfile" "$lease_term" 0 "$@"; do
11 | sleep 0.1
12 | done
13 | return 0
14 | }
15 |
16 | function _trylock() {
17 | local lockfile="$1"
18 | local lease_term="${2:-300}" # 5 minute default lease
19 | shift 2
20 | _attempt_lock "$lockfile" "$lease_term" 1 "$@"
21 | return $?
22 | }
23 |
24 | function _attempt_lock() {
25 | local lockfile="$1"
26 | local lease_term="$2"
27 | local show_errors="$3"
28 | shift 3
29 |
30 | local now
31 | now="$(date +%s)"
32 | let expiry="$now + $lease_term"
33 | printf "%s\n%s\n" "$$" "$expiry" > "$lockfile.$expiry"
34 | printf "%s\n" "$@" >> "$lockfile.$expiry"
35 | while ! ln "$lockfile.$expiry" "$lockfile" 2>/dev/null; do
36 | local current_expiry
37 | current_expiry="$(head -n2 "$lockfile" 2>/dev/null | tail -n1)"
38 | if [[ $? != 0 ]]; then
39 | # this can happen when someone has manually clobbered the lock state
40 | [[ $show_errors -eq 1 ]] && >&2 printf "Lock in partial state. May be safe to bust the lock: %s\n" "$lockfile"
41 | return 1
42 | else
43 | if [[ "$current_expiry" -lt "$now" ]]; then
44 | [[ $show_errors -eq 1 ]] && >&2 printf "Lock lease expired. May be safe to bust the lock: %s\n" "$lockfile"
45 | # The lease has expired, and the lock can be semi-safely busted open
46 | # however, there is no easy way to avoid race conditions here, so I don't provide a solution yet
47 | return 1
48 | else
49 | # clean up our attempt to grab the lock
50 | rm "$lockfile.$expiry"
51 | return 1
52 | fi
53 | fi
54 | done
55 |
56 | return 0
57 | }
58 |
59 | function _renew() {
60 | local lockfile="$1"
61 | local lease_term="${2:-300}" # 5 minute default lease
62 |
63 | local now
64 | now="$(date +%s)"
65 | let expiry="$now + $lease_term"
66 | local current_expiry
67 | current_expiry="$(head -n2 "$lockfile" 2>/dev/null | tail -n1)"
68 | printf "%s\n%s" "$$" "$expiry" > "$lockfile.$current_expiry"
69 | mv "$lockfile.$current_expiry" "$lockfile.$expiry"
70 |
71 | return 0
72 | }
73 |
74 | function _unlock() {
75 | local lockfile="$1"
76 |
77 | local current_expiry
78 | current_expiry="$(head -n2 "$lockfile" 2>/dev/null | tail -n1)"
79 | rm -f "$lockfile.$current_expiry"
80 | rm -f "$lockfile"
81 | }
82 |
83 | function show_usage() {
84 | local prog
85 | prog="$(basename "$0")"
86 | cat <<-HELPMESSAGE
87 | $prog lock [LOCKNAME] [LEASE_TERM] # acquire the advisory lock, blocking until it's acquired
88 | $prog trylock [LOCKNAME] [LEASE_TERM] # exits with status 0 if the lock is acquired, otherwise 1
89 | $prog renew [LOCKNAME] [LEASE_TERM] # renew a currently held lock
90 | $prog unlock [LOCKNAME] # release an advisory lock
91 | HELPMESSAGE
92 | if [[ "$1" == "-v" || "$1" == "--verbose" ]]; then
93 | cat <<-VERBOSEHELP
94 |
95 | This script provides a simple locking mechanism for use in external scripts.
96 | The default lease term is 5 minutes.
97 | Any extra arguments to lock or trylock are saved in the lockfile to ease debugging.
98 | VERBOSEHELP
99 | fi
100 | }
101 |
102 | function main() {
103 | local subcommand="$1"
104 | shift
105 | case "$subcommand" in
106 | r|renew)
107 | _renew "$@"
108 | exit $?
109 | ;;
110 | t|trylock)
111 | _trylock "$@"
112 | exit $?
113 | ;;
114 | l|lock)
115 | _lock "$@"
116 | exit $?
117 | ;;
118 | u|unlock)
119 | _unlock "$@"
120 | exit $?
121 | ;;
122 | -?|-h|--help|help|"")
123 | show_usage "$@"
124 | exit $?
125 | ;;
126 | *)
127 | echo "Unknown command: $subcommand"
128 | echo ""
129 | show_usage
130 | exit 2
131 | ;;
132 | esac
133 | }
134 |
135 | if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
136 | main "$@"
137 | fi
138 |
--------------------------------------------------------------------------------
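A usage sketch for `fs_advisory_lock`: serializing a job across machines that share a filesystem. The lock path and `run_nightly_report` are placeholders; the extra argument after the lease term is stored in the lockfile for debugging, as the verbose help above notes.

```
LOCK=/shared/locks/nightly-report

if fs_advisory_lock trylock "$LOCK" 600 "$(hostname):$$"; then
    run_nightly_report
    fs_advisory_lock unlock "$LOCK"
else
    echo "another host holds the lock; skipping" >&2
fi
```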
/bin/git-alias:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | if [[ $# == 0 ]]; then
4 | git config --get-regexp 'alias.*' | colrm 1 6 | sed 's/[ ]/ = /'
5 | elif [[ $# == 2 ]]; then
6 |     git config --global alias."$1" "$2"
7 | else
8 | echo "usage: git alias " >&2
9 | exit 1
10 | fi
11 |
--------------------------------------------------------------------------------
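A usage sketch for `bin/git-alias` (with it on `PATH`, git dispatches `git alias` to it): two arguments define a global alias, no arguments lists the existing ones.

```
git alias st status
git alias lg 'log --oneline --graph --decorate'

git alias
# st = status
# lg = log --oneline --graph --decorate
```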
/bin/git-prank:
--------------------------------------------------------------------------------
1 | #!/usr/bin/ruby
2 |
3 | require 'time'
4 |
5 | Messages = <<-MESSAGES.split("\n").map(&:strip)
6 | The grasshopper lies heavy
7 | Don't feed the mogwai after midnight
8 | Drop it like it's hot
9 | MESSAGES
10 |
11 | def main
12 | if ARGV == ["-"] || ARGV.size == 0
13 | input = STDIN.read
14 | else
15 | input = ARGV.join(" ")
16 | end
17 |
18 | input.chars.map(&:ord).each_slice(4) do |slice|
19 | slice << 0 until slice.size == 4
20 | slice = slice.pack('CCCC').unpack('N')[0]
21 | puts "git commit --allow-empty --date=#{Time.at(slice).iso8601} -m '#{Messages.sample(1)[0]}'"
22 | end
23 | end
24 |
25 | if __FILE__ == $PROGRAM_NAME
26 | main
27 | end
28 |
--------------------------------------------------------------------------------
/bin/git-surgery:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require 'open3'
4 | require 'fileutils'
5 | require 'thunder'
6 |
7 | # Some guidance taken from
8 | # https://www.kernel.org/pub/software/scm/git/docs/v1.7.10.1/howto/recover-corrupted-blob-object.txt
9 |
10 | # for each corrupt packfile:
11 | # #rename the packfile
12 | # mv $packfile $packfile.corrupted
13 | # #unpack as many objects as possible from the packfile
14 | # git unpack-objects -r < $packfile.corrupted
15 | # rm -f $packfile.corrupted
16 | # for each missing blob:
17 | # #pull in from the "closest" git remote
18 | # ssh $remote 'cd $path && git cat-file blob $blob' | git hash-object -w --stdin
19 | # verify the integrity of the repo
20 |
21 | class GitSurgery
22 | include Thunder
23 |
24 | default_command :surgery
25 |
26 | desc "surgery", "Automatically fix corrupt packfiles"
27 | def surgery
28 | # diagnose the issue (find any corrupt packfiles)
29 | corrupt_packfiles.each do |corrupt_packfile|
30 | # unpack as many objects as possible from the packfile
31 | unpack_packfile(corrupt_packfile)
32 | end
33 |
34 | # find any missing blobs
35 | missing_blobs.each do |missing_blob|
36 | # fetch the blobs from the remotes (maybe include a network distance for the remotes?)
37 | fetch_remote_blob(missing_blob)
38 | end
39 | end
40 |
41 | def corrupt_packfiles
42 | fsck_result, _ = Open3.capture2e("git fsck --strict --full --no-dangling")
43 | packfiles = fsck_result.lines.reduce([]) do |result, line|
44 | case line
45 | when /^error: (.*\.pack) SHA1 checksum mismatch/
46 | packfile = $~[1]
47 | result << packfile
48 | end
49 | result
50 | end
51 | return packfiles.sort.uniq
52 | end
53 |
54 |   def unpack_packfile(packfile)
55 | puts "unpacking #{packfile}"
56 | FileUtils.mv(packfile, "#{packfile}.corrupt")
57 | pid = spawn("git unpack-objects", in: packfile, out: "/dev/null", err: [:child, :out])
58 | _, status = Process.wait2(pid)
59 | raise "unpacking failed" unless status.success?
60 | FileUtils.rm("#{packfile}.corrupt", force: true)
61 | end
62 |
63 | def missing_blobs
64 | fsck_result, _ = Open3.capture2e("git fsck --strict --full --no-dangling")
65 | blobs = fsck_result.lines.reduce([]) do |result, line|
66 | case line
67 | when /missing blob (.*)/
68 | missing_blob = $~[1]
69 | result << missing_blob
70 | end
71 | result
72 | end
73 | return blobs.sort.uniq
74 | end
75 |
76 | desc "fetch_remote_blob"
77 | def fetch_remote_blob(blob)
78 | remotes.each do |name, spec|
79 | begin
80 | case spec[:fetch_url]
81 | when /^ssh:\/\/([^@]*)@([^\/]*)\/(.*)$/
82 | user, host, path = $~.captures
83 | fetch_via_ssh(user, host, path, blob)
84 | when /^git:\/\/([^\/]*)\/(.*)$/
85 | host, path = $~.captures
86 | fetch_via_git(host, path, blob)
87 | when /^([^@:]*)@([^:]*):(.*)$/
88 | user, host, path = $~.captures
89 | fetch_via_ssh(user, host, path, blob)
90 | when /^https?:\/\/(.*)$/
91 | # try to git fetch the remote blob
92 | fetch_via_http(spec[:fetch_url], blob)
93 | when /^file:\/\/(.*)$/
94 | path = $~[1]
95 | fetch_via_file(path, blob)
96 | else
97 | # assume it's a path
98 | fetch_via_file(spec[:fetch_url], blob)
99 | end
100 | return true
101 | rescue
102 | # NOP
103 | end
104 | end
105 | return false
106 | end
107 |
108 | def fetch_via_ssh(user, host, path, blob)
109 | ssh_command = "ssh #{user}@#{host} 'cd #{path} && git cat-file blob #{blob}'"
110 | git_hash_object_command = "git hash-object -w --stdin"
111 | Open3.pipeline(ssh_command, git_hash_object_command, out: "/dev/null")
112 | end
113 |
114 | def fetch_via_git(host, path, blob)
115 | raise NotImplementedError
116 | end
117 |
118 | def fetch_via_file(path, blob)
119 | puts "fetching #{blob} from #{path}"
120 | git_cat_file_command = "git -C #{path} cat-file blob #{blob}"
121 | git_hash_object_command = "git hash-object -w --stdin"
122 | Open3.pipeline(git_cat_file_command, git_hash_object_command, out: "/dev/null")
123 | end
124 |
125 | def fetch_via_http(url, blob)
126 | raise NotImplementedError
127 | end
128 |
129 | def fetch_via_ftp(url, blob)
130 | raise NotImplementedError
131 | end
132 |
133 | def remotes
134 | return @remotes if @remotes
135 | @remotes = {}
136 | `git remote -v show`.split("\n").each do |remotespec|
137 |       match = /(?<remote_name>\S+)\s+(?<remote_url>.+) \((?<url_type>fetch|push)\)/.match(remotespec)
138 | puts remotespec unless match
139 | next unless match
140 | @remotes[match[:remote_name]] ||= {}
141 | case match[:url_type]
142 | when "fetch"
143 | @remotes[match[:remote_name]][:fetch_url] = match[:remote_url]
144 | when "push"
145 | @remotes[match[:remote_name]][:push_url] = match[:remote_url]
146 | end
147 | end
148 | return @remotes
149 | end
150 | end
151 |
152 | GitSurgery.new.start
153 |
--------------------------------------------------------------------------------
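The comment block at the top of `bin/git-surgery` is the manual procedure the script automates; spelled out as shell steps, with `$packfile`, `$remote`, `$path`, and `$blob` as placeholders:

```
# 1. set the corrupt packfile aside and salvage what it still contains
mv "$packfile" "$packfile.corrupted"
git unpack-objects -r < "$packfile.corrupted"
rm -f "$packfile.corrupted"

# 2. for each blob fsck reports missing, pull it from a healthy clone
ssh "$remote" "cd $path && git cat-file blob $blob" | git hash-object -w --stdin

# 3. confirm the repository is whole again
git fsck --strict --full
```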
/bin/graceful_kill:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | shopt -s extglob
4 |
5 | # this program is meant to send a sequence of signals to the given program, so long as it is still alive
6 |
7 | function graceful_kill() {
8 | local target_pid="$1"
9 | shift
10 | local action
11 | for action in "$@"; do
12 | # first, check that the process still exists
13 | kill -0 "$target_pid" &>/dev/null
14 | if [[ $? -eq 1 ]]; then
15 | # we didn't reach the end of our list...
16 | return 1
17 | fi
18 | case "$action" in
19 | +([0-9]))
20 | sleep "$action"
21 | ;;
22 | *)
23 | kill "$action" "$target_pid" &>/dev/null
24 | ;;
25 | esac
26 | done
27 | }
28 |
29 | function main() {
30 | if [[ $# -lt 2 ]]; then
31 | cat <<-USAGE
32 | Usage: $(basename "$0") PID [ACTION...]
33 |
34 | Where each ACTION is either the number of seconds to sleep, or a signal to send to the target process
35 | USAGE
36 | exit 1
37 | fi
38 | graceful_kill "$@"
39 | }
40 |
41 | main "$@"
42 |
--------------------------------------------------------------------------------
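A usage sketch for `bin/graceful_kill`; `$pid` is a placeholder for the target process. Each action is either a signal argument for `kill` or a number of seconds to wait, and an exit status of 1 means the process disappeared before the sequence finished.

```
# ask politely, wait 10 seconds, then force the issue
graceful_kill "$pid" -TERM 10 -KILL

# escalate in stages, noting if the process exits early
graceful_kill "$pid" -INT 5 -TERM 5 -KILL || echo "process exited partway through the sequence"
```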
/bin/groupby:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Groups the input by a given column, printing the specified aggregations.
4 |
5 | Groups are defined as consecutive lines of input that have the same value in the group column.
6 | This means it works best with input that is already sorted.
7 |
8 | EXAMPLES:
9 | python3 -c '[print(i, i**2, i % 2) for i in range(10)]' | sort -k 3 | tr ' ' '\t' | groupby -g3 --min 2
10 | python3 -c '[print(i, i**2, i % 2) for i in range(10)]' | sort -k 3 | tr ' ' '\t' | groupby -g3 --max 2
11 | python3 -c '[print(i, i**2, i % 2) for i in range(10)]' | sort -k 3 | tr ' ' '\t' | groupby -g3 --join 2
12 | python3 -c '[print(i, i**2, i % 2) for i in range(10)]' | sort -k 3 | tr ' ' '\t' | groupby -g3 --count
13 | """
14 |
15 | # Example:
16 |
17 | import sys
18 | import argparse
19 |
20 | from signal import signal, SIGPIPE, SIG_DFL
21 |
22 |
23 | def getopt():
24 | parser = argparse.ArgumentParser(usage='%(prog)s -g COLUMN [OPTIONS]')
25 | parser.add_argument('-g', '--group', type=int, metavar='COLUMN', help='the field number to group by')
26 | parser.add_argument('-d', '--delimiter', default='\t', metavar='DELIM', help='use DELIM instead of TAB as a field separator')
27 | parser.add_argument('--stdin-tty', action='store_true', help='allow stdin to be a tty')
28 |
29 | output_options = parser.add_argument_group()
30 | output_options.add_argument('--min', type=int, action='append', default=[], metavar='COLUMN', help='output the lexicographical minimum value of COLUMN')
31 | output_options.add_argument('--max', type=int, action='append', default=[], metavar='COLUMN', help='output the lexicographical maximum value of COLUMN')
32 | output_options.add_argument('--join', type=int, action='append', default=[], metavar='COLUMN', help='join the values of this COLUMN')
33 | output_options.add_argument('--first', type=int, action='append', default=[], metavar='COLUMN', help='output the first value of this COLUMN')
34 | output_options.add_argument('--last', type=int, action='append', default=[], metavar='COLUMN', help='output the last value of this COLUMN')
35 | output_options.add_argument('--count', action='store_true', help='output the number of records in the group')
36 |
37 | return parser.parse_args()
38 |
39 |
40 | class BaseAggregation:
41 | def __init__(self, column=None):
42 | self.column = column
43 |
44 | def new_group(self):
45 | pass
46 |
47 | def aggregate(self, record):
48 | return self._aggregate(record[column_to_offset(self.column)])
49 |
50 | def _aggregate(self, field):
51 | raise NotImplementedError
52 |
53 | def output(self):
54 | raise NotImplementedError
55 |
56 |
57 | class Aggregation(BaseAggregation):
58 | def __init__(self, column=None):
59 | self.column = column
60 | self.state = None
61 |
62 | def new_group(self):
63 | self.state = None
64 |
65 | def output(self):
66 | return self.state
67 |
68 |
69 | class AggregateMax(Aggregation):
70 | def _aggregate(self, field):
71 | if self.state is None:
72 | self.state = field
73 | if self.state < field:
74 | self.state = field
75 |
76 |
77 | class AggregateMin(Aggregation):
78 | def _aggregate(self, field):
79 | if self.state is None:
80 | self.state = field
81 | if self.state > field:
82 | self.state = field
83 |
84 |
85 | class AggregateJoin(Aggregation):
86 | def new_group(self):
87 | self.state = []
88 |
89 | def _aggregate(self, field):
90 | self.state.append(field)
91 |
92 | def output(self):
93 | return ','.join(self.state)
94 |
95 |
96 | class AggregateFirst(Aggregation):
97 | def _aggregate(self, field):
98 | if self.state is None:
99 | self.state = field
100 |
101 |
102 | class AggregateLast(Aggregation):
103 | def _aggregate(self, field):
104 | self.state = field
105 |
106 |
107 | class AggregateCount(Aggregation):
108 | def new_group(self):
109 | self.state = 0
110 |
111 | def aggregate(self, record):
112 | self.state += 1
113 |
114 | def output(self):
115 | return str(self.state)
116 |
117 |
118 | def column_to_offset(column):
119 | if column > 0:
120 | return column - 1
121 | else:
122 | return column
123 |
124 |
125 | def main():
126 | options = getopt()
127 |
128 | if not options.stdin_tty and sys.stdin.isatty():
129 | print('STDIN is a tty. You probably want to pipe something. Use --stdin-tty to override this error', file=sys.stderr)
130 | return
131 |
132 | # die on pipe errors
133 | signal(SIGPIPE, SIG_DFL)
134 |
135 | aggregations = []
136 | for column in options.max:
137 | aggregations.append(AggregateMax(column))
138 | for column in options.min:
139 | aggregations.append(AggregateMin(column))
140 | for column in options.join:
141 | aggregations.append(AggregateJoin(column))
142 | for column in options.first:
143 | aggregations.append(AggregateFirst(column))
144 | for column in options.last:
145 | aggregations.append(AggregateLast(column))
146 | if options.count:
147 | aggregations.append(AggregateCount())
148 |
149 | current_group = None
150 | for aggregation in aggregations:
151 | aggregation.new_group()
152 |
153 | for line in sys.stdin:
154 | # chop off the line terminator
155 | line = line[:-1]
156 | fields = line.split(options.delimiter)
157 | group_field = fields[column_to_offset(options.group)]
158 | if current_group is None:
159 | current_group = group_field
160 |
161 | if current_group != group_field:
162 | print(options.delimiter.join([current_group] + [aggregation.output() for aggregation in aggregations]))
163 | current_group = group_field
164 | for aggregation in aggregations:
165 | aggregation.new_group()
166 |
167 | for aggregation in aggregations:
168 | aggregation.aggregate(fields)
169 |
170 | print(options.delimiter.join([current_group] + [aggregation.output() for aggregation in aggregations]))
171 |
172 |
173 | if __name__ == '__main__':
174 | main()
175 |
--------------------------------------------------------------------------------
/bin/install_dispatching_hook:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | HOOKS_DIR="$(git rev-parse --git-dir)/hooks"
4 | HOOKS="applypatch-msg pre-applypatch post-applypatch pre-commit prepare-commit-msg commit-msg post-commit pre-rebase post-checkout post-merge pre-receive update post-receive post-update pre-auto-gc pre-svn-rebase post-svn-rebase pre-svn-dcommit post-svn-dcommit"
5 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
6 | DISPATCHING_HOOK="$(dirname "$DIR")/githooks/dispatching_hook"
7 |
8 | # Follow symbolic links (needed because -m is a GNU extension to readlink)
9 | function resolve_symlink() {
10 | local resolved_path="$1"
11 | while [[ -h "$resolved_path" ]]; do
12 | resolved_dir="$( cd -P "$(dirname "$resolved_path")" && pwd )"
13 | resolved_path="$(readlink -n "$resolved_path")"
14 | # handle relative symlinks
15 | [[ "$resolved_path" != /* ]] && resolved_path="$resolved_dir/$resolved_path"
16 | done
17 | echo "$resolved_path"
18 | }
19 |
20 | function relocate_hook() {
21 | # move the hook out of the way, unless it's a symlink to the dispatching hook
22 | local hook_name="$1"
23 | local hook_path="$HOOKS_DIR/$hook_name"
24 | if [[ ! -e "$hook_path" ]]; then
25 | return 0
26 | fi
27 | if [[ -h "$hook_path" ]]; then
28 | local resolved_symlink="$(resolve_symlink "$hook_path")"
29 | if [[ "$resolved_symlink" == "$(resolve_symlink "$DISPATCHING_HOOK")" ]]; then
30 | return 0
31 | fi
32 | fi
33 |
34 | mv "$hook_path" "$hook_path.local"
35 | }
36 |
37 | # install the given hook
38 | function install_hook() {
39 | local hook_name="$1"
40 | relocate_hook "$hook_name"
41 | ln -sf "$DISPATCHING_HOOK" "$HOOKS_DIR/$hook_name"
42 | }
43 |
44 | for hook in $HOOKS; do
45 | install_hook "$hook"
46 | done
47 |
--------------------------------------------------------------------------------
/bin/jsonschema:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require 'json'
4 |
5 | # take in a series of json documents from ARGV[0] or STDIN
6 | infile = ARGV[0]
7 | infile ||= "-"
8 |
9 | if infile == "-"
10 | infile = STDIN
11 | else
12 | infile = File.open(infile)
13 | end
14 |
15 | schema = []
16 |
17 | def hash_to_schema(hash)
18 | return nil unless hash.is_a? Hash
19 | result = []
20 | hash.each do |k,v|
21 | result << k.to_s
22 | case v
23 | when Hash
24 | subkeys = hash_to_schema(v).map{|sk| "#{k}.#{sk}" }
25 | result = merge_schemas(result, subkeys)
26 | when Array
27 | subkeys = merge_schemas(*v.map{|sv| hash_to_schema(sv) }.compact).map{|sk| "#{k}[].#{sk}"}
28 | result = merge_schemas(result, ["#{k}[]"], subkeys)
29 | end
30 | end
31 | return result
32 | end
33 |
34 | def merge_schemas(*schemas)
35 | schemas.reduce([]) { |unified,schema| unified |= schema }
36 | end
37 |
38 | def remove_redundant_keys(keys)
39 | result = []
40 | keys.each do |key|
41 | result << key if not keys.any? {|other| other != key && other.start_with?(key) }
42 | end
43 | result
44 | end
45 |
46 | # p hash_to_schema({"foo": 1})
47 | # p hash_to_schema(1)
48 | # p hash_to_schema("foo")
49 | # p hash_to_schema({foo: [1, 2 ,3]})
50 | # puts hash_to_schema({foo: [{bar: 1}, {baz: 2}]})
51 | # p hash_to_schema({foo: {bar: 1, baz: 2}})
52 | # p remove_redundant_keys(["foo", "foo[]"])
53 |
54 | # exit 0
55 |
56 | while true
57 | line = infile.gets
58 | break unless line
59 | line.strip!
60 | next if line.empty?
61 | json = JSON.parse(line)
62 | schema = merge_schemas(schema, hash_to_schema(json))
63 | end
64 |
65 | schema = remove_redundant_keys(schema)
66 | schema.sort.each { |key| puts key }
67 |
68 | infile.close
69 |
--------------------------------------------------------------------------------
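A usage sketch for `bin/jsonschema`: feed it one JSON document per line and it prints the flattened key paths (output shown as comments).

```
echo '{"user":{"name":"ada"},"tags":[{"id":1}]}' | jsonschema
# tags[].id
# user.name
```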
/bin/jsonschema2:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require 'json'
4 |
5 | # take in a series of json documents from ARGV[0] or STDIN
6 | infile = ARGV[0]
7 | infile ||= "-"
8 |
9 | if infile == "-"
10 | infile = STDIN
11 | else
12 | infile = File.open(infile)
13 | end
14 |
15 | schema = {}
16 |
17 | def hash_to_schema(hash)
18 | return nil unless hash.is_a? Hash
19 | result = {}
20 | hash.each do |k,v|
21 | result[k] ||= []
22 | result[k] << v.class
23 | case v
24 | when Hash
25 | subschema = hash_to_schema(v).map{|sk,v| ["#{k}.#{sk}", v]}.to_h
26 | result = merge_schemas(result, subschema)
27 | when Array
28 | subschema = merge_schemas(*v.map{|sv| hash_to_schema(sv)}.compact).map{|sk, v| ["#{k}[].#{sk}", v]}.to_h
29 | result = merge_schemas(result, subschema)
30 | end
31 | end
32 | return result
33 | end
34 |
35 | def merge_schemas(*schemas)
36 | schemas.reduce({}) do |result, schema|
37 | schema.each do |k,v|
38 | result[k] ||= []
39 | result[k] |= v
40 | end
41 | result
42 | end
43 | end
44 |
45 | def remove_redundant_keys(keys)
46 | result = []
47 | keys.each do |key|
48 | result << key if not keys.any? {|other| other != key && other.start_with?(key) }
49 | end
50 | result
51 | end
52 |
53 | # p hash_to_schema({"foo": 1})
54 | # p hash_to_schema(1)
55 | # p hash_to_schema("foo")
56 | # p hash_to_schema({foo: [1, 2 ,3]})
57 | # puts hash_to_schema({foo: [{bar: 1}, {baz: 2}, {bar: "foo"}]})
58 | # p hash_to_schema({foo: {bar: 1, baz: 2}})
59 | # p remove_redundant_keys(["foo", "foo[]"])
60 |
61 | # exit 0
62 |
63 | while true
64 | line = infile.gets
65 | break unless line
66 | line.strip!
67 | next if line.empty?
68 | json = JSON.parse(line)
69 | schema = merge_schemas(schema, hash_to_schema(json))
70 | end
71 |
72 | # schema = remove_redundant_keys(schema)
73 | type_map = {
74 | Fixnum => "Integer",
75 | Hash => "Object",
76 | FalseClass => "Boolean",
77 | TrueClass => "Boolean",
78 | }
79 | key_size = schema.keys.map(&:size).max
80 | schema.sort.each do |key, types|
81 | # skip unambiguous objects!
82 | next if types == [Hash]
83 | types = types.map {|type| type_map[type] || type.to_s }.sort.uniq.join(", ")
84 | puts "%-#{key_size}s => %s" % [key, types]
85 | end
86 |
87 | infile.close
88 |
--------------------------------------------------------------------------------
/bin/least:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # n.b. I posted this script to:
4 | # http://unix.stackexchange.com/a/205260/85237
5 | # https://github.com/johnmellor/scripts/blob/master/bin/least
6 |
7 | # Needed so less doesn't prevent trap from working.
8 | set -m
9 | # Keeps this script alive when Ctrl+C is pressed in less,
10 | # so we still cat and rm $TMPFILE afterwards.
11 | trap '' EXIT
12 |
13 | TMPBUFFER=$(mktemp 2>/dev/null || mktemp -t 'tmp')
14 |
15 | export LESS=-FR
16 | if [[ -t 0 ]]; then
17 | # Stdin is a terminal (neither a file nor a pipe); hopefully "$@" contains a
18 | # filename that less can read into a pipe.
19 | LESS= command less "$@" | tee "$TMPBUFFER" | command less "$@"
20 | else
21 | # Stdin is a file or pipe; pass it directly to tee.
22 | tee "$TMPBUFFER" | command less "$@"
23 | fi
24 |
25 | if [[ " $LESS $@" =~ \ -(-chop-long-lines|[a-jl-nq-su-zA-CE-NQ-SU-Z]*S) ]]; then
26 | # Don't wrap lines, since --chop-long-lines was passed to less.
27 | LINECOUNT=$(cat "$TMPBUFFER" | wc -l)
28 | else
29 | # Wrap lines like less before counting (the perl regex strips ANSI escapes).
30 | [[ -n $COLUMNS ]] || COLUMNS=$(tput cols)
31 | LINECOUNT=$(perl -pe 's/\e\[?.*?[\@-~]//g' "$TMPBUFFER" | fold -w "$COLUMNS" | wc -l)
32 | fi
33 | [[ -n $LINES ]] || LINES=$(tput lines)
34 | if (( $LINECOUNT < $LINES )); then
35 | cat "$TMPBUFFER"
36 | fi
37 |
38 | rm "$TMPBUFFER"
39 |
--------------------------------------------------------------------------------
/bin/lines:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | class Spec
4 | Grammar = %r{
5 |     (?<spec> \g<range_spec> ){0}
6 | 
7 |     (?<range_spec>
8 |       \g<single_line_spec> |
9 |       (?<line_start>\g<number>)?(?:(?::|-)(?<line_end>\g<number>)? |
10 |       \+(?<line_count>\g<number>))
11 |     ){0}
12 |     (?<single_line_spec> \g<number> ){0}
13 | 
14 |     (?<number> -?\d+ ){0}
15 | 
16 |     \A\g<spec>\Z
17 | }x
18 |
19 | def self.from_string(spec)
20 | match_data = Grammar.match(spec)
21 | raise "Invalid spec #{spec}" unless match_data
22 | # puts "match_data=#{match_data.inspect}"
23 | line_start = match_data[:line_start]
24 | line_start = 0 if line_start.nil?
25 | line_start = line_start.to_i
26 | line_count = match_data[:line_count]
27 | if line_count
28 | line_count = line_count.to_i
29 | if line_count < 0
30 | line_end = line_start
31 | line_start = line_end + line_count
32 | else
33 | line_end = line_start + line_count
34 | end
35 | else
36 | line_end = match_data[:line_end]
37 | line_end = -1 if line_end.nil?
38 | line_end = line_end.to_i
39 | end
40 | single_line_spec = match_data[:single_line_spec]
41 | if single_line_spec
42 | line_start = single_line_spec.to_i
43 | line_end = line_start
44 | if line_start < 0
45 | raise "Ambiguous spec. Please use either `:#{line_start}` or `#{line_start}+0`"
46 | end
47 | end
48 | return Spec.new(line_start, line_end)
49 | end
50 |
51 | def initialize(line_start, line_end)
52 | @line_start = line_start
53 | @line_end = line_end
54 | end
55 |
56 | attr_accessor :line_start, :line_end
57 | end
58 |
59 | require 'minitest'
60 |
61 | class TestSpec < Minitest::Test
62 | def line_spec_tester(spec, line_start, line_end)
63 | spec_object = Spec.from_string(spec)
64 | assert_equal line_start, spec_object.line_start, "provided spec: #{spec}. start: #{spec_object.line_start}, end: #{spec_object.line_end}"
65 | assert_equal line_end, spec_object.line_end, "provided spec: #{spec}. start: #{spec_object.line_start}, end: #{spec_object.line_end}"
66 | end
67 |
68 | def test_simple_specs
69 | line_spec_tester("1:4", 1, 4)
70 | line_spec_tester("1:", 1, -1)
71 | line_spec_tester("4:", 4, -1)
72 | line_spec_tester(":4", 0, 4)
73 | line_spec_tester(":", 0, -1)
74 |
75 | line_spec_tester("1-4", 1, 4)
76 | line_spec_tester("1-", 1, -1)
77 | line_spec_tester("4-", 4, -1)
78 | # line_spec_tester("-4", 0, 4) # this use case is ambiguous, and is disallowed at the spec level
79 | assert_raises { Spec.from_string("-4") }
80 | line_spec_tester("-", 0, -1)
81 | end
82 |
83 | def test_negative_specs
84 | line_spec_tester("8:-2", 8, -2)
85 | line_spec_tester("2:-2", 2, -2)
86 | line_spec_tester("-4:8", -4, 8)
87 | line_spec_tester("-4:-2", -4, -2)
88 | line_spec_tester("-4:", -4, -1)
89 | line_spec_tester(":-2", 0, -2)
90 | line_spec_tester("2:-2", 2, -2)
91 |
92 | line_spec_tester("8--2", 8, -2)
93 | line_spec_tester("-4-8", -4, 8)
94 | line_spec_tester("-4--2", -4, -2)
95 | line_spec_tester("-4-", -4, -1)
96 | line_spec_tester("--2", 0, -2)
97 | end
98 |
99 | def test_counting_specs
100 | line_spec_tester("3+2", 3, 5)
101 | line_spec_tester("-3+2", -3, -1)
102 | line_spec_tester("-3+0", -3, -3)
103 | end
104 |
105 | def test_single_line_specs
106 | line_spec_tester("3", 3, 3)
107 | # line_spec_tester("-3", -3, -3) # this use case is ambiguous, and is disallowed at the spec level
108 | assert_raises { Spec.from_string("-3") }
109 | end
110 | end
111 |
112 | class SpecList
113 | def initialize(specs)
114 | @specs = specs.map { |spec| Spec.from_string(spec) }
115 | @buffer_size = needed_buffer()
116 | @max_line = @specs.max_by(&:line_end).line_end
117 | @max_line = nil if @specs.any?{|spec| spec.line_end < 0}
118 | end
119 |
120 | attr_accessor :buffer_size, :max_line
121 |
122 | def needed_buffer
123 | start_min = @specs.min_by(&:line_start).line_start
124 | needed_buffer = [start_min, @specs.min_by(&:line_end).line_end].min
125 | needed_buffer = 0 if needed_buffer >= 0
126 | needed_buffer *= -1
127 | needed_buffer += 1 if needed_buffer != 0
128 | return needed_buffer
129 | end
130 |
131 | def match_specs(line_no, out_of)
132 | matching_specs = @specs.select do |spec|
133 | next if out_of.nil? && ( spec.line_start < 0 || spec.line_end < 0 )
134 | next if out_of < 0 && spec.line_start < 0
135 | out_of_at_least = out_of.abs
136 |
137 | result = if spec.line_start < 0
138 | spec.line_start <= (line_no - out_of_at_least)
139 | else
140 | spec.line_start <= line_no
141 | end
142 |
143 | result &&= if spec.line_end < 0
144 | spec.line_end >= (line_no - out_of_at_least)
145 | else
146 | spec.line_end >= line_no
147 | end
148 |
149 | result
150 | end
151 | return matching_specs
152 | end
153 |
154 | def emit_line(line, output, matching_specs)
155 | output << line
156 | end
157 |
158 | def process(input, output = STDOUT, strictly_order: false)
159 | buffer = []
160 | stream_size = 0
161 | input.each_line.with_index do |line, line_number|
162 | line_number += 1
163 | stream_size += 1
164 | break if @max_line && line_number > @max_line
165 |
166 | # first, determine which specs match the current line number
167 | if strictly_order
168 | buffer << line
169 | next if buffer.size <= @buffer_size
170 | line_no_under_consideration = line_number - @buffer_size
171 | line_to_emit = buffer.shift
172 | else
173 | line_no_under_consideration = line_number
174 | line_to_emit = line
175 | end
176 |
177 | matching_specs = match_specs(line_no_under_consideration, -(line_no_under_consideration + @buffer_size))
178 |
179 | unless matching_specs.empty?
180 | emit_line(line_to_emit, output, matching_specs)
181 | end
182 | end
183 |
184 | # output the rest of the lines left in the buffer, if need be
185 | buffer.each.with_index do |line, buffer_line_number|
186 | line_number = stream_size - @buffer_size + buffer_line_number
187 | line_number += 1
188 | matching_specs = match_specs(line_number, stream_size)
189 | unless matching_specs.empty?
190 | emit_line(line, output, matching_specs)
191 | end
192 | end
193 | end
194 | end
195 |
196 | class TestSpecList < Minitest::Test
197 | self.make_my_diffs_pretty!
198 |
199 | def test_detects_constraints_correctly
200 | list = SpecList.new(["1-4", "6-8"])
201 | assert_equal 0, list.needed_buffer
202 | assert_equal 8, list.max_line
203 |
204 | list = SpecList.new(["1-4", "6-"])
205 | assert_equal 2, list.needed_buffer
206 | assert_equal nil, list.max_line
207 | end
208 |
209 | def test_match_specs_works_simple_specs
210 | list = SpecList.new(["1-3", "5-6", "9-10"])
211 | cases = {
212 | 1 => true,
213 | 2 => true,
214 | 3 => true,
215 | 4 => false,
216 | 5 => true,
217 | 6 => true,
218 | 7 => false,
219 | 8 => false,
220 | 9 => true,
221 | }
222 | cases.each do |line_no, expected|
223 | matching_specs = list.match_specs(line_no, -line_no)
224 | assert_equal expected, !matching_specs.empty?, "line #{line_no}. Expected: #{expected}. Matched: #{matching_specs.inspect}"
225 | end
226 | end
227 |
228 | def test_match_specs_works_negative_specs
229 | list = SpecList.new(["7:-2", "-9:-8", "-6:5"])
230 | cases = {
231 | 1 => true,
232 | 2 => true,
233 | 3 => false,
234 | 4 => true,
235 | 5 => true,
236 | 6 => false,
237 | 7 => true,
238 | 8 => true,
239 | 9 => false,
240 | 10 => false,
241 | }
242 | cases.each do |line_no, expected|
243 | matching_specs = list.match_specs(line_no, cases.size)
244 | assert_equal expected, !matching_specs.empty?, "line #{line_no}. Expected: #{expected}. Matched: #{matching_specs.inspect}"
245 | end
246 | end
247 |
248 | def process_input_tester(list, expected, strictly_order: false)
249 | input = (1..expected.size).to_a.map(&:to_s).join("\n")
250 | output = []
251 | list.process(input, output, strictly_order: strictly_order)
252 | actual = output.map do |line|
253 | [line.to_i, true]
254 | end.to_h
255 | (1...expected.size).each do |line_no|
256 | assert_equal expected[line_no], actual[line_no] || false, "testing line ##{line_no}"
257 | end
258 | end
259 |
260 | def test_process_simple_case
261 | list = SpecList.new(["1-4"])
262 | process_input_tester(list, {
263 | 1 => true,
264 | 2 => true,
265 | 3 => true,
266 | 4 => true,
267 | 5 => false,
268 | 6 => false,
269 | 7 => false,
270 | 8 => false,
271 | 9 => false,
272 | 10 => false,
273 | })
274 | list = SpecList.new(["1-2", "4-6", "8-9"])
275 | process_input_tester(list, {
276 | 1 => true,
277 | 2 => true,
278 | 3 => false,
279 | 4 => true,
280 | 5 => true,
281 | 6 => true,
282 | 7 => false,
283 | 8 => true,
284 | 9 => true,
285 | 10 => false,
286 | })
287 | end
288 |
289 | def test_process_strict_ordering
290 | list = SpecList.new(["2-3"])
291 | process_input_tester(list, {
292 | 1 => false,
293 | 2 => true,
294 | 3 => true,
295 | 4 => false,
296 | 5 => false,
297 | 6 => false,
298 | }, strictly_order: true)
299 | end
300 |
301 | def test_process_strict_with_buffer
302 | list = SpecList.new(["2:-2"])
303 | process_input_tester(list, {
304 | 1 => false,
305 | 2 => true,
306 | 3 => true,
307 | 4 => false,
308 | 5 => false,
309 | }, strictly_order: true)
310 | end
311 | end
312 |
313 | if __FILE__ == $PROGRAM_NAME
314 | require 'optparse'
315 |
316 | options = {}
317 | OptionParser.new do |opts|
318 | opts.banner = "Usage: #{Process.argv0} [ROWSPEC]"
319 |
320 | opts.on("--test", "Run self-tests") do |test|
321 | options[:self_test] = test
322 | end
323 | end.parse!
324 |
325 | if options[:self_test]
326 | require 'minitest/autorun'
327 | exit 0 # TODO: exit with the correct code if the tests passed or not
328 | end
329 | ARGV << "1:" if ARGV.empty?
330 |
331 | list = SpecList.new(ARGV)
332 | #open stdin, read lines and emit them according to the spec
333 | list.process(STDIN, STDOUT)
334 | end
335 |
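A usage sketch based on the specs the tests above exercise (assuming the script is on PATH as lines):

    seq 20 | lines 1:4     # lines 1 through 4
    seq 20 | lines 3+2     # a count spec: line 3 plus the next 2, i.e. lines 3 through 5
    seq 20 | lines         # no spec defaults to "1:", echoing the whole stream
    lines --test           # run the built-in minitest suite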
--------------------------------------------------------------------------------
/bin/organize:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Organizes files by date according to a pattern (See USAGE)
4 |
5 |
6 | # Get the date from a filename
7 | #
8 | # Arguments:
9 | # filename
10 | # Output:
11 | # the date in YYYYMMDD format, if found
12 | # Return Codes:
13 | # 1 - no date found in the filename
14 | function date_for_file() {
15 | local filename="${1##*/}"
16 |
17 | # YYYYMMDD
18 | local pattern="^[0-9]{8}"
19 | if [[ "${filename}" =~ ${pattern} ]]; then
20 | echo "${filename:0:8}"
21 | return 0
22 | fi
23 |
24 | # YYYYMM (default to 1st of the month)
25 | local pattern="^[0-9]{6}"
26 | if [[ "${filename}" =~ ${pattern} ]]; then
27 | echo "${filename:0:6}01"
28 | return 0
29 | fi
30 |
31 | # YYYY-MM-DD
32 | local pattern="^[0-9]{4}-[0-9]{2}-[0-9]{2}"
33 | if [[ "${filename}" =~ ${pattern} ]]; then
34 | echo "${filename:0:4}${filename:5:2}${filename:8:2}"
35 | return 0
36 | fi
37 |
38 | # YYYY-MM (default to the 1st of the month)
39 | local pattern="^[0-9]{4}-[0-9]{2}"
40 | if [[ "${filename}" =~ ${pattern} ]]; then
41 | echo "${filename:0:4}${filename:5:2}01"
42 | return 0
43 | fi
44 |
45 | return 1
46 | }
47 |
48 |
49 | function _test_date_for_file() {
50 | local actual expected
51 |
52 | actual="$(date_for_file "20180708 - yyyymmdd example")"
53 | expected="20180708"
54 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
55 |
56 | actual="$(date_for_file "2018-07-08 - yyyy-mm-dd example")"
57 | expected="20180708"
58 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
59 | }
60 |
61 |
62 | # Compile the date pattern
63 | # Outputs a bash-compatible string intended to be eval'd in a scope with a variable named `epoch`.
64 | # Should safely quote the pattern, so this is intended to be secure.
65 | # Expected usage:
66 | # eval echo "$(compile_pattern "%y/%m")"
67 | #
68 | # Arguments:
69 | # pattern
70 | # Output:
71 | # the compiled pattern, ready to be eval'd
72 | # shellcheck disable=SC2016
73 | function compile_pattern() {
74 | local pattern="$1"
75 | pattern="$(printf "%q" "${pattern}")"
76 |
77 | pattern="${pattern//%y/%Y}" # I reject the concept of 2-digit years
78 | pattern="${pattern//%q/'$(((10#$(printf "%(%m)T" "${epoch}") - 1) / 3 + 1))'}"
79 | pattern="${pattern//%h/'$(((10#$(printf "%(%m)T" "${epoch}") - 1) / 6 + 1))'}"
80 |
81 | # unquote spaces and tildes because we quote it anyways
82 | pattern="${pattern//\\\~/\~}"
83 | pattern="${pattern//\\ / }"
84 |
85 | echo '$(printf "'"%(${pattern})T"'" "${epoch}")'
86 | }
87 |
88 |
89 | # shellcheck disable=SC2016,SC2034,SC2088
90 | function _test_compile_pattern() {
91 | local actual expected
92 |
93 | actual="$(compile_pattern "%yQ%q")"
94 | expected='$(printf "%(%YQ$(((10#$(printf "%(%m)T" "${epoch}") - 1) / 3 + 1)))T" "${epoch}")'
95 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
96 |
97 | actual="$(compile_pattern "%y/%m - %B")"
98 | expected='$(printf "%(%Y/%m - %B)T" "${epoch}")'
99 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
100 |
101 | actual="$(compile_pattern '$(echo hi)/%y')"
102 | expected='$(printf "%(\$\(echo hi\)/%Y)T" "${epoch}")'
103 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
104 |
105 | actual="$(compile_pattern '~/%y')"
106 | expected='$(printf "%(~/%Y)T" "${epoch}")'
107 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
108 |
109 | actual="$(compile_pattern '%y %m')"
110 | expected='$(printf "%(%Y %m)T" "${epoch}")'
111 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
112 |
113 | local epoch="1527897600" # epoch for 20180602
114 | actual="$(eval echo "$(compile_pattern "%y %m %d")")"
115 | expected='2018 06 02'
116 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
117 |
118 | local epoch="1527897600" # epoch for 20180602
119 | actual="$(eval echo "$(compile_pattern "%b %y")")"
120 | expected='Jun 2018'
121 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
122 | }
123 |
124 |
125 | function day_of_year() {
126 | local date="$1"
127 | local -i day_of_year=0
128 | local -i year="${date:0:4}"
129 | local -i month="10#${date:4:2}"
130 | local -i day="10#${date:6:2}"
131 |
132 | if (( month >= 3 )); then
133 | # formula taken from https://en.wikipedia.org/wiki/Ordinal_date
134 | # the idea is to count the number of days from march 1st
135 | (( day_of_year = (306 * month - 914) / 10 + day + 59 ))
136 | (( day_of_year += year % 4 == 0 && (year % 100 != 0 || year % 400 == 0) ))
137 | elif (( month == 2 )); then
138 | (( day_of_year = day + 31))
139 | else
140 | (( day_of_year = day ))
141 | fi
142 |
143 | echo $day_of_year
144 | }
145 |
146 |
147 | function _test_day_of_year() {
148 | local actual expected date
149 |
150 | return 0 # disabled exhaustive tests because they are slow
151 |
152 | # exhaustive testing of all dates
153 | for i in $(seq 1 366); do
154 | date="$(date -u --date="20180101 + $i days" +%Y%m%d)"
155 | actual="$(day_of_year "$date")"
156 | expected="$(date -u --date="$date" +%-j)"
157 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}:$date expected=${expected} actual=${actual}"
158 | done
159 | for i in $(seq 1 366); do
160 | date="$(date -u --date="20160101 + $i days" +%Y%m%d)"
161 | actual="$(day_of_year "$date")"
162 | expected="$(date -u --date="$date" +%-j)"
163 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}:$date expected=${expected} actual=${actual}"
164 | done
165 | }
166 |
167 |
168 | # Convert a YYYYMMDD date into seconds since the epoch
169 | # This is surprisingly difficult to do in portable bash
170 | #
171 | # Arguments:
172 | # date
173 | # Output:
174 | # the number of seconds since the epoch for the midnight on the given date
175 | function date_to_epoch() {
176 | # inspired by https://www.reddit.com/r/bash/comments/8y7uzz/portable_date_formatting/e28whvg/
177 | # credit to u/whetu for pointing me in the direction of this solution, based upon:
178 | # http://www.etalabs.net/sh_tricks.html
179 | local date="$1"
180 | local -i day_of_year=0
181 | local -i year_offset="$(( 10#${date:0:4} - 1600 ))"
182 | local -i month="10#${date:4:2}"
183 | local -i day="10#${date:6:2}"
184 |
185 | # inlined version of day_of_year, but skipping leap day accounting
186 | if (( month >= 3 )); then
187 | # formula adapted from https://en.wikipedia.org/wiki/Ordinal_date
188 | # the idea is to count the number of days from march 1st
189 | (( day_of_year = (306 * month - 914) / 10 + day + 59 ))
190 | elif (( month == 2 )); then
191 | (( day_of_year = day + 31))
192 | else
193 | (( day_of_year = day ))
194 | fi
195 |
196 | # 135140 is the number of days between 16000101 and 19700101.
197 | # 1600 is used as the multiple-of-400 epoch here instead of 2000 since C-style division behaves badly with negative dividends
198 | (( days_since_epoch = day_of_year + year_offset * 365 + year_offset / 4 - year_offset / 100 + year_offset / 400 - 135140 ))
199 |
200 | echo $(( days_since_epoch * 24 * 60 * 60 ))
201 | }
202 |
203 |
204 | function _test_date_to_epoch() {
205 | local actual expected
206 |
207 | actual="$(date_to_epoch "20180602")"
208 | expected='1527897600'
209 | # expected="$(date +%s -u --date=20180602)"
210 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
211 |
212 | actual="$(date_to_epoch "20181231")"
213 | expected='1546214400'
214 | # expected="$(date +%s -u --date=20181231)"
215 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
216 |
217 | actual="$(date_to_epoch "20160301")"
218 | expected='1456790400'
219 | # expected="$(date +%s -u --date=20160301)"
220 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
221 |
222 | actual="$(date_to_epoch "20161231")"
223 | expected='1483142400'
224 | # expected="$(date +%s -u --date=20161231)"
225 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
226 | }
227 |
228 |
229 | # Prepares the commands to organize files according to a pattern.
230 | #
231 | # Arguments:
232 | # pattern - the compiled pattern; see compile_pattern
233 | # Input:
234 | # null-terminated file paths
235 | # Output:
236 | # commands that can be piped into bash
237 | function prepare_commands() {
238 | local verbose_flag=""
239 | (( verbosity > 0 )) && verbose_flag=" -v "
240 | local compiled_pattern="$1"
241 | local date epoch file destination_path
242 | while IFS= read -r -d '' file; do
243 | date="$(date_for_file "${file}")" || continue
244 | # shellcheck disable=SC2034
245 | epoch="$(date_to_epoch "${date}")"
246 |
247 | # see compile_pattern for why the eval is safe
248 | destination_path="$(eval echo "${compiled_pattern}")"
249 | echo "mkdir ${verbose_flag}-p \"${destination_path}\"; mv ${verbose_flag}'${file}' \"${destination_path}/\"'${file##*/}'"
250 | done
251 |
252 | return 0
253 | }
254 |
255 |
256 | function _test_prepare_commands() {
257 | local actual expected pattern
258 | pattern="$(compile_pattern "%yQ%q")"
259 |
260 | actual="$(printf '20180602 - Letter.pdf\x00' | prepare_commands "${pattern}")"
261 | expected="mkdir -p \"2018Q2\"; mv '20180602 - Letter.pdf' \"2018Q2/\"'20180602 - Letter.pdf'"
262 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
263 |
264 | actual="$(printf '/some/other/dir/20180602 - Letter.pdf\x00' | prepare_commands "/path/to/archive/${pattern}")"
265 | expected="mkdir -p \"/path/to/archive/2018Q2\"; mv '/some/other/dir/20180602 - Letter.pdf' \"/path/to/archive/2018Q2/\"'20180602 - Letter.pdf'"
266 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
267 |
268 | actual="$(printf "20180602\\x0020180706\\x00" | prepare_commands "${pattern}")"
269 | expected="mkdir -p \"2018Q2\"; mv '20180602' \"2018Q2/\"'20180602'"$'\n'"mkdir -p \"2018Q3\"; mv '20180706' \"2018Q3/\"'20180706'"
270 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
271 |
272 | actual="$(printf "notadatedfile\\x0020180706\\x00" | prepare_commands "${pattern}")"
273 | expected="mkdir -p \"2018Q3\"; mv '20180706' \"2018Q3/\"'20180706'"
274 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
275 |
276 | pattern="$(compile_pattern "%y %m")"
277 | actual="$(printf '20180602 - Letter.pdf\x00' | prepare_commands "${pattern}")"
278 | expected="mkdir -p \"2018 06\"; mv '20180602 - Letter.pdf' \"2018 06/\"'20180602 - Letter.pdf'"
279 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
280 | }
281 |
282 |
283 | # Enumerate the files to be organized
284 | #
285 | # Arguments:
286 | # files - defaults to reading files from stdin
287 | # Output:
288 | # null-terminated file paths
289 | function enumerate_files() {
290 | local files=("$@")
291 | if (( $# == 0 )) || [[ $# == 1 && "$1" == '-' ]]; then
292 | if [[ -z "${stdin_null}" ]]; then
293 | while read -r file; do
294 | [[ -f "${file}" ]] && printf '%s\x00' "${file}"
295 | done
296 | else
297 | while IFS= read -r -d "" file; do
298 | [[ -f "${file}" ]] && printf '%s\x00' "${file}"
299 | done
300 | fi
301 | else
302 | for file in "${files[@]}"; do
303 | [[ -f "${file}" ]] && printf '%s\x00' "${file}"
304 | done
305 | fi
306 |
307 | return 0
308 | }
309 |
310 | function _test_enumerate_files() {
311 | local actual expected pattern
312 | debug=0
313 |
314 | function _count_null_tokens() {
315 | local -i count=0
316 | while IFS= read -r -d '' token; do
317 | (( count += 1 ))
318 | (( debug > 0 )) && >&2 echo "count=$count; _=$token"
319 | done
320 | echo $count
321 | }
322 |
323 | actual="$(enumerate_files "$0" "$0" "$0" | _count_null_tokens)"
324 | expected="3"
325 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
326 |
327 | actual="$(printf '%s\n%s\n%s\n' "$0" "$0" "$0" | enumerate_files | _count_null_tokens)"
328 | expected="3"
329 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
330 |
331 | stdin_null=true
332 | actual="$(printf '%s\x00%s\x00%s\x00' "$0" "$0" "$0" | enumerate_files | _count_null_tokens)"
333 | expected="3"
334 | [[ "${actual}" == "${expected}" ]] || >&2 echo "FAIL $0:${LINENO}: expected=${expected} actual=${actual}"
335 | unset stdin_null
336 | }
337 |
338 |
339 | ####################################
340 | # main actions
341 | ####################################
342 |
343 |
344 | function organize() {
345 | local pattern="$1"
346 | if [[ -z "${pattern}" ]]; then
347 | echo "ERROR: PATTERN must be specified"
348 | >&2 show_usage
349 | return 1
350 | fi
351 | shift
352 |
353 | local execution=bash
354 | # shellcheck disable=SC2209
355 | [[ -n "$is_dry_run" ]] && execution=cat
356 |
357 | enumerate_files "$@" | prepare_commands "$(compile_pattern "${pattern}")" | $execution
358 | }
359 |
360 |
361 | function run_self_tests() {
362 | _test_date_for_file
363 | _test_compile_pattern
364 | _test_prepare_commands
365 | _test_day_of_year
366 | _test_date_to_epoch
367 | _test_enumerate_files
368 | }
369 |
370 |
371 | function show_usage() {
372 | local prog="${0##*/}"
373 | cat <<-HELPMESSAGE
374 | Usage: ${prog} [OPTIONS] PATTERN [FILE...] # organize files in the current directory
375 |
376 | Options:
377 | -n, --dry-run only show what would be done, but don't actually do it
378 | -t, --test run self-tests instead of organizing files
379 | -0, --null files read from stdin are terminated by a null character instead of by whitespace
380 | -v, --verbose
381 | HELPMESSAGE
382 | if (( verbosity > 0 )); then
383 | cat <<-VERBOSEHELP
384 |
385 | This utility organizes files into a folder hierarchy according to a pattern. Interpreted sequences are:
386 |
387 | %y year
388 | %m month (e.g. 07)
389 | %b locale's abbreviated month name
390 | %B locale's full month name (e.g. January)
391 | %d day (e.g. 08)
392 | %q quarter number (e.g. 20180325 is in Q1)
393 | %h half number (e.g. 20180101 is in H1, as is 20180630)
394 | %G year of ISO week number
395 | %V ISO week number
396 | VERBOSEHELP
397 | fi
398 | }
399 |
400 |
401 | function main() {
402 | local -a args=()
403 | declare -i verbosity=0
404 | # shellcheck disable=SC2016
405 | local subcommand='organize "${args[@]}"'
406 | while (( $# > 0 )); do
407 | case "$1" in
408 | -0|--null)
409 | stdin_null=true
410 | ;;
411 | -n|--dry-run)
412 | is_dry_run=true
413 | ;;
414 | -v|--verbose)
415 | verbosity+=1
416 | ;;
417 | -t|--test)
418 | subcommand="run_self_tests"
419 | ;;
420 | -\?|-h|--help|help)
421 | subcommand=">&2 show_usage"
422 | ;;
423 | --)
424 | shift
425 | break
426 | ;;
427 | *)
428 | args+=("$1")
429 | ;;
430 | esac
431 | shift
432 | done
433 | while (( $# > 0 )); do
434 | args+=("$1")
435 | shift
436 | done
437 |
438 | # shellcheck disable=SC2086
439 | eval $subcommand
440 | }
441 |
442 | if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
443 | main "$@"
444 | fi
445 |
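A dry-run sketch that mirrors the expectations in _test_prepare_commands (the file names are hypothetical):

    # print the mkdir/mv commands for filing scans into quarterly folders, without running them
    organize -n "%yQ%q" "20180602 - Letter.pdf"
    # => mkdir -p "2018Q2"; mv '20180602 - Letter.pdf' "2018Q2/"'20180602 - Letter.pdf'
    # read null-terminated paths from find and actually perform the moves
    find . -maxdepth 1 -type f -print0 | organize -0 "%y/%m - %B"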
--------------------------------------------------------------------------------
/bin/ppsql:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | if __FILE__ == $PROGRAM_NAME
4 | if ARGV.size < 1
5 | puts "Usage: #{$PROGRAM_NAME} DATABASE_URL"
6 | exit 1
7 | end
8 |
9 | require 'uri'
10 | u = URI.parse(ARGV[0].strip)
11 |
12 | command = "psql"
13 |
14 | args = ARGV[1..-1].map(&:strip)
15 | args << "--host=#{u.host}" if u.host
16 | args << "--port=#{u.port}" if u.port
17 | args << "--username=#{u.user}" if u.user
18 | args << "--dbname=#{u.path[1..-1]}" if u.path != "/"
19 |
20 | ENV["PGPASSWORD"] = u.password if u.password
21 |
22 | exec command, *args
23 | end
24 |
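A usage sketch (the URL is hypothetical); the script unpacks the URL into psql flags and exports PGPASSWORD:

    ppsql postgres://alice:s3cret@db.example.com:5432/widgets
    # roughly: PGPASSWORD=s3cret psql --host=db.example.com --port=5432 --username=alice --dbname=widgets
    # trailing arguments are passed through to psql
    ppsql "$DATABASE_URL" -c 'select 1'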
--------------------------------------------------------------------------------
/bin/rmate:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # rmate
4 | # Copyright (C) 2011-2018 by Harald Lapp
5 | #
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU General Public License
17 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
18 |
19 | #
20 | # This script can be found at:
21 | # https://github.com/aurora/rmate
22 | #
23 |
24 | #
25 | # This script is a pure bash compatible shell script implementing remote
26 | # textmate functionality
27 | #
28 |
29 | #
30 | # Thanks very much to all users and contributors! All bug-reports,
31 | # feature-requests, patches, etc. are greatly appreciated! :-)
32 | #
33 |
34 | # init
35 | #
36 | version="1.0.1"
37 | version_date="2018-01-08"
38 | version_string="rmate-sh $version ($version_date)"
39 |
40 | # determine hostname
41 | function hostname_command(){
42 | if command -v hostname >/dev/null 2>&1; then
43 | echo "hostname"
44 | else {
45 | HOSTNAME_DESCRIPTOR="/proc/sys/kernel/hostname"
46 | if test -r "$HOSTNAME_DESCRIPTOR"; then
47 | echo "cat $HOSTNAME_DESCRIPTOR"
48 | else
49 | echo "hostname"
50 | fi
51 | }
52 | fi
53 | }
54 |
55 | hostname=$($(hostname_command))
56 |
57 | # default configuration
58 | host=localhost
59 | port=52698
60 | eval home=$(builtin printf "~%q" "${SUDO_USER:-$LOGNAME}")
61 |
62 | function load_config {
63 | local rc_file=$1
64 | local row
65 |
66 | local host_pattern="^host(:[[:space:]]+|=)([^ ]+)"
67 | local port_pattern="^port(:[[:space:]]+|=)([0-9]+)"
68 |
69 | if [ -f "$rc_file" ]; then
70 | while read -r row; do
71 | if [[ "$row" =~ $host_pattern ]]; then
72 | host=${BASH_REMATCH[2]}
73 | elif [[ "$row" =~ $port_pattern ]]; then
74 | port=${BASH_REMATCH[2]}
75 | fi
76 | done < "$rc_file"
77 | fi
78 | }
79 |
80 | for i in "/etc/${0##*/}" $home/."${0##*/}/${0##*/}.rc" $home/."${0##*/}.rc"; do
81 | load_config $i
82 | done
83 |
84 | host="${RMATE_HOST:-$host}"
85 | port="${RMATE_PORT:-$port}"
86 |
87 |
88 | # misc initialization
89 | filepaths=()
90 | displaynames=()
91 | selections=()
92 | filetypes=()
93 | verbose=false
94 | nowait=true
95 | force=false
96 |
97 | # process command-line parameters
98 | #
99 | function showusage {
100 | echo "usage: $(basename $0) [arguments] [--] file-path edit specified file
101 | or: $(basename $0) [arguments] - read text from stdin
102 |
103 | -H, --host HOST Connect to HOST. Use 'auto' to detect the host from
104 | SSH. Defaults to $host.
105 | -p, --port PORT Port number to use for connection. Defaults to $port.
106 | -w, --[no-]wait Wait for file to be closed by TextMate.
107 | -l, --line LINE Place caret on line number after loading file.
108 | +N Alias for --line, if N is a number (eg.: +5).
109 | -m, --name NAME The display name shown in TextMate.
110 | -t, --type TYPE Treat file as having specified type.
111 | -n, --new Open in a new window (Sublime Text).
112 | -f, --force Open even if file is not writable.
113 | -v, --verbose Verbose logging messages.
114 | -h, --help Display this usage information.
115 | --version Show version and exit.
116 | "
117 | }
118 |
119 | function log {
120 | if [[ $verbose = true ]]; then
121 | echo "$@" 1>&2
122 | fi
123 | }
124 |
125 | function dirpath {
126 | (cd "$(dirname "$1")" >/dev/null 2>/dev/null || { echo "unable to cd to $1 directory" 1>&2; exit; } ; pwd -P)
127 | }
128 |
129 | function canonicalize {
130 | local filepath="$1"
131 | local relativepath
132 | local result
133 |
134 | if [[ "${filepath:0:1}" = "-" ]]; then
135 | filepath="./$filepath"
136 | fi
137 |
138 | local dir=$(dirpath "$filepath")
139 |
140 | if [ -L "$filepath" ]; then
141 | relativepath=$(cd "$dir" || { echo "unable to cd to $dir" 1>&2; exit; } ; readlink "$(basename "$filepath")")
142 | result=$(dirpath "$relativepath")/$(basename "$relativepath")
143 | else
144 | result=$(basename "$filepath")
145 | if [ "$dir" = '/' ]; then
146 | result="$dir$result"
147 | else
148 | result="$dir/$result"
149 | fi
150 | fi
151 |
152 | echo "$result"
153 | }
154 |
155 | while [[ "${1:0:1}" = "-" || "$1" =~ ^\+([0-9]+)$ ]]; do
156 | case $1 in
157 | -)
158 | break
159 | ;;
160 | --)
161 | shift
162 | break
163 | ;;
164 | -H|--host)
165 | host=$2
166 | shift
167 | ;;
168 | -p|--port)
169 | port=$2
170 | shift
171 | ;;
172 | -w|--wait)
173 | nowait=false
174 | ;;
175 | --no-wait)
176 | nowait=true
177 | ;;
178 | -l|--line)
179 | selections+=($2)
180 | shift
181 | ;;
182 | -m|--name)
183 | displaynames+=($2)
184 | shift
185 | ;;
186 | -t|--type)
187 | filetypes+=($2)
188 | shift
189 | ;;
190 | -n|--new)
191 | new=true
192 | ;;
193 | -f|--force)
194 | force=true
195 | ;;
196 | -v|--verbose)
197 | verbose=true
198 | ;;
199 | --version)
200 | echo "$version_string"
201 | exit 1
202 | ;;
203 | -h|-\?|--help)
204 | showusage
205 | exit 1
206 | ;;
207 | +[0-9]*)
208 | selections+=(${1:1})
209 | ;;
210 | *)
211 | showusage
212 | exit 1
213 | ;;
214 | esac
215 |
216 | shift
217 | done
218 |
219 | if [[ "$host" = "auto" && "$SSH_CONNECTION" != "" ]]; then
220 | host=${SSH_CONNECTION%% *}
221 | fi
222 |
223 | filepaths=("$@")
224 |
225 | if [ "${filepaths[*]}" = "" ]; then
226 | if [[ $nowait = false ]]; then
227 | filepaths=('-')
228 | else
229 | case "$-" in
230 | *i*)
231 | showusage
232 | exit 1
233 | ;;
234 | *)
235 | filepaths=('-')
236 | ;;
237 | esac
238 | fi
239 | fi
240 |
241 |
242 | #------------------------------------------------------------
243 | # main
244 | #------------------------------------------------------------
245 |
246 | function open_file {
247 | local index="$1"
248 | local filepath="${filepaths[$index]}"
249 | local selection="${selections[$index]}"
250 | local filetype="${filetypes[$index]}"
251 | local displayname="${displaynames[$index]}"
252 | local realpath
253 | local data
254 |
255 | if [ "$filepath" != "-" ]; then
256 | realpath=$(canonicalize "$filepath")
257 | log "$realpath"
258 |
259 | if [ -d "$filepath" ]; then
260 | echo "$filepath is a directory and rmate is unable to handle directories."
261 | exit 1
262 | fi
263 |
264 | if [ -f "$realpath" ] && [ ! -w "$realpath" ]; then
265 | if [[ $force = false ]]; then
266 | echo "File $filepath is not writable! Use -f to open anyway."
267 | exit 1
268 | elif [[ $verbose = true ]]; then
269 | log "File $filepath is not writable! Opening anyway."
270 | fi
271 | fi
272 |
273 | if [ "$displayname" = "" ]; then
274 | displayname="$hostname:$filepath"
275 | fi
276 | else
277 | displayname="$hostname:untitled"
278 | fi
279 |
280 | echo "open" 1>&3
281 | echo "display-name: $displayname" 1>&3
282 | echo "real-path: $realpath" 1>&3
283 | echo "data-on-save: yes" 1>&3
284 | echo "re-activate: yes" 1>&3
285 | echo "token: $filepath" 1>&3
286 |
287 | if [[ $new = true ]]; then
288 | echo "new: yes" 1>&3
289 | fi
290 |
291 | if [ "$selection" != "" ]; then
292 | echo "selection: $selection" 1>&3
293 | fi
294 |
295 | if [ "$filetype" != "" ]; then
296 | echo "file-type: $filetype" 1>&3
297 | fi
298 |
299 | if [ "$filepath" != "-" ] && [ -f "$filepath" ]; then
300 | filesize=$(($(wc -c <"$realpath")))
301 | echo "data: $filesize" 1>&3
302 | cat "$realpath" 1>&3
303 | elif [ "$filepath" = "-" ]; then
304 | if [ -t 0 ]; then
305 | echo "Reading from stdin, press ^D to stop"
306 | else
307 | log "Reading from stdin"
308 | fi
309 |
310 | # preserve trailing newlines
311 | data=$(cat; echo x)
312 | data=${data%x}
313 | filesize=$(($(echo -ne "$data" | wc -c)))
314 | echo "data: $filesize" 1>&3
315 | echo -n "$data" 1>&3
316 | else
317 | echo "data: 0" 1>&3
318 | fi
319 |
320 | echo 1>&3
321 | }
322 |
323 | function handle_connection {
324 | local cmd
325 | local name
326 | local value
327 | local token
328 | local tmp
329 | local content
330 |
331 | while read -r 0<&3; do
332 | REPLY="${REPLY#"${REPLY%%[![:space:]]*}"}"
333 | REPLY="${REPLY%"${REPLY##*[![:space:]]}"}"
334 |
335 | cmd=$REPLY
336 |
337 | token=""
338 | tmp=""
339 |
340 | while read -r 0<&3; do
341 | REPLY="${REPLY#"${REPLY%%[![:space:]]*}"}"
342 | REPLY="${REPLY%"${REPLY##*[![:space:]]}"}"
343 |
344 | if [ "$REPLY" = "" ]; then
345 | break
346 | fi
347 |
348 | name="${REPLY%%:*}"
349 | value="${REPLY##*:}"
350 | value="${value#"${value%%[![:space:]]*}"}" # fix textmate syntax highlighting: "
351 |
352 | case $name in
353 | "token")
354 | token=$value
355 | ;;
356 | "data")
357 | if [ "$tmp" = "" ]; then
358 | tmp="/tmp/rmate.$RANDOM.$$"
359 | touch "$tmp"
360 | fi
361 |
362 | dd bs=1 count=$value <&3 >>"$tmp" 2>/dev/null
363 | ;;
364 | *)
365 | ;;
366 | esac
367 | done
368 |
369 | if [[ "$cmd" = "close" ]]; then
370 | log "Closing $token"
371 | if [[ "$token" == "-" ]]; then
372 | echo -n "$content"
373 | fi
374 | elif [[ "$cmd" = "save" ]]; then
375 | log "Saving $token"
376 | if [ "$token" != "-" ]; then
377 | cat "$tmp" > "$token"
378 | else
379 | content=$(cat "$tmp")
380 | fi
381 | rm "$tmp"
382 | fi
383 | done
384 |
385 | log "Done"
386 | }
387 |
388 | # connect to textmate and send command
389 | #
390 | exec 3<> "/dev/tcp/$host/$port"
391 |
392 | if [ $? -gt 0 ]; then
393 | echo "Unable to connect to TextMate on $host:$port"
394 | exit 1
395 | fi
396 |
397 | read -r server_info 0<&3
398 |
399 | log $server_info
400 |
401 | for i in "${!filepaths[@]}"; do
402 | open_file "$i"
403 | done
404 |
405 | echo "." 1>&3
406 |
407 | if [[ $nowait = true ]]; then
408 | exec </dev/null >/dev/null 2>/dev/null
409 | ( (handle_connection &) &)
410 | else
411 | handle_connection
412 | fi
413 |
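Typical use is from a remote shell with a reverse tunnel back to an editor listening on the default port 52698; a sketch with a hypothetical host name:

    # on the workstation running the editor
    ssh -R 52698:localhost:52698 devbox.example.com
    # on the remote host: open the file locally and wait until the editor closes it
    rmate --wait /etc/nginx/nginx.conf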
--------------------------------------------------------------------------------
/bin/run_services:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script runs services in the background, logs their output, and spins them down gently
4 | #
5 | # I use it in development, since I don't want to have databases and message brokers running
6 | # all the time on my laptop.
7 | #
8 | # Reads a list of services either from ARGV or from a ".services" file
9 | # Alternatively, can take a list of services (from either source) in label=command format
10 |
11 | function cleanup_from_pid_file() {
12 | if [[ ! -f "$1" ]]; then
13 | exit 0
14 | fi
15 |
16 | if ! graceful_kill "$(cat "$1")" -SIGINT 3 -SIGTERM 10 -0; then
17 | rm "$1"
18 | exit 0
19 | fi
20 | exit 1
21 | }
22 |
23 | function cleanup() {
24 | for label in "$@"; do
25 | cleanup_from_pid_file ".${label}.pid" &
26 | done
27 | }
28 |
29 | function map_service() {
30 | case "$1" in
31 | postgresql|postgres|pgsql|pg|psql)
32 | echo "postgres=postgres -D /usr/local/var/postgres"
33 | ;;
34 | redis)
35 | echo "redis=redis-server /usr/local/etc/redis.conf"
36 | ;;
37 | memcached|memcache|mcache|memc)
38 | echo "memcached=memcached"
39 | ;;
40 | rabbitmq|rmq)
41 | echo "rabbitmq=rabbitmq-server"
42 | ;;
43 | mongodb|mongo)
44 | echo "mongodb=mongod --dbpath=.mongodb"
45 | ;;
46 | mailcatcher|mailc)
47 | echo "mailcatcher=mailcatcher -f"
48 | ;;
49 | *=*)
50 | echo "$1"
51 | ;;
52 | *)
53 | echo "$(basename "$1")=$*"
54 | ;;
55 | esac
56 | }
57 |
58 | function main() {
59 | local -a services
60 | if [[ $# -gt 0 ]]; then
61 | services=( "$@" )
62 | elif [[ -s "./.services" ]]; then
63 | mapfile services < ./.services
64 | else
65 | services=( )
66 | fi
67 |
68 | if [[ ${#services[@]} -eq 0 ]]; then
69 | echo "Usage: run_services SERVICES"
70 | echo "Or list services to run in a .services file"
71 | exit 1
72 | fi
73 | local -a labels
74 | local -a commands
75 | local i
76 | for i in "${!services[@]}"; do
77 | # NOTE: this needs to be unquoted, because otherwise it includes the potential newline in each service label,
78 | # which wreaks havoc on the service mapping
79 | services[$i]=$( map_service ${services[$i]} )
80 | labels[$i]="${services[$i]/%=*/}"
81 | commands[$i]="${services[$i]:((${#labels[$i]} + 1))}"
82 | done
83 | local cleanup_command="cleanup ${labels[*]}; exit"
84 | trap "$cleanup_command" SIGHUP SIGINT SIGTERM
85 | for i in "${!services[@]}"; do
86 | echo "Starting ${labels[$i]}: ${commands[$i]}"
87 | ${commands[$i]} > ."${labels[$i]}".log &
88 | echo $! > ."${labels[$i]}".pid
89 | done
90 | while true; do
91 | sleep 10
92 | done
93 | }
94 |
95 | main "$@"
96 |
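A sketch of both invocation styles (the web entry is a hypothetical label=command pair):

    # run two services directly from the command line
    run_services postgres redis
    # or list them in a .services file and run with no arguments
    printf '%s\n' postgres redis 'web=rails server' > .services
    run_services
    # output goes to .<label>.log, pids to .<label>.pid; Ctrl+C shuts everything down via graceful_kill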
--------------------------------------------------------------------------------
/bin/shell_patterns:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function test_patterns() {
4 | echo " 1#*x = ${1#*x}"
5 | echo " 1#*x* = ${1#*x*}"
6 | echo " 1#x* = ${1#x*}"
7 | echo "1##*x = ${1##*x}"
8 | echo "1##*x* = ${1##*x*}"
9 | echo " 1##x* = ${1##x*}"
10 | echo " 1%*x = ${1%*x}"
11 | echo " 1%*x* = ${1%*x*}"
12 | echo " 1%x* = ${1%x*}"
13 | echo "1%%*x = ${1%%*x}"
14 | echo "1%%*x* = ${1%%*x*}"
15 | echo " 1%%x* = ${1%%x*}"
16 | }
17 |
18 | for teststring in "$@"; do
19 | test_patterns "$teststring"
20 | done
21 |
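A quick way to see the operators side by side (the test string is arbitrary); each line shows how bash's #, ##, % and %% prefix/suffix removal treats a pattern built around "x":

    shell_patterns "fooxbarxbaz"
    # for example: ${1#*x} drops the shortest prefix ending in x (-> barxbaz),
    # ${1##*x} the longest such prefix (-> baz), ${1%x*} the shortest suffix starting at x (-> fooxbar)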
--------------------------------------------------------------------------------
/bin/sort_scanned_files:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require 'fileutils'
4 |
5 | MonthNames = {
6 | 1 => "January",
7 | 2 => "February",
8 | 3 => "March",
9 | 4 => "April",
10 | 5 => "May",
11 | 6 => "June",
12 | 7 => "July",
13 | 8 => "August",
14 | 9 => "September",
15 | 10 => "October",
16 | 11 => "November",
17 | 12 => "December",
18 | }
19 |
20 | def likely_destination_dir?(dir)
21 | return File.exist?(File.expand_path(Time.now.year.to_s, dir))
22 | end
23 |
24 | def likely_source_dir?(dir)
25 | pdfs = Dir[File.expand_path("*.pdf", dir)]
26 | all_files = Dir[File.expand_path("*", dir)]
27 | return pdfs.size > 0 && ((all_files.size - pdfs.size) / all_files.size.to_f) > 0.5
28 | end
29 |
30 | def confirm_suspected_dir(source, dest)
31 | unless likely_source_dir?(source) && likely_destination_dir?(dest)
32 | raise "Can't automatically detect directories"
33 | end
34 | return source, dest
35 | end
36 |
37 | def determine_dirs(args)
38 | case args.size
39 | when 1
40 | puts "attempting auto-discovery based on current directory"
41 | if likely_destination_dir?(Dir.pwd)
42 | puts "found directory for current year. Assuming this is the destination"
43 | return confirm_suspected_dir(args[0], Dir.pwd)
44 | end
45 |
46 | if likely_source_dir?(Dir.pwd)
47 | puts "found more than 50% pdfs in current dir. Assuming this is the source"
48 | return confirm_suspected_dir(Dir.pwd, args[0])
49 | end
50 | raise "Can't make an intelligent decision"
51 | when 2
52 | return args[0], args[1]
53 | else
54 | raise ""
55 | end
56 | end
57 |
58 | def main(args)
59 | begin
60 | source, destination_root = determine_dirs(args)
61 | rescue => e
62 | puts "Error: #{e.message}" if e.message != ""
63 | puts <<-USAGE.gsub(/^ +/, "")
64 | USAGE: #{File.basename($PROGRAM_NAME)} SOURCE_DIR DEST_ROOT
65 | USAGE
66 | exit 1
67 | end
68 | source = File.expand_path(source)
69 | destination_root = File.expand_path(destination_root)
70 |
71 | unless File.directory?(source)
72 | puts "#{source} is not a directory"
73 | exit 1
74 | end
75 | unless File.directory?(destination_root)
76 | puts "#{destination_root} is not a directory"
77 | exit 1
78 | end
79 |
80 | files_to_sort = Dir["#{source}/*.pdf"]
81 | puts "Sorting out #{files_to_sort.size} pdfs"
82 | files_to_sort.each do |file_path|
83 | filename = File.basename(file_path)
84 | unless filename =~ /^(?<year>\d{4})(?<month>\d{2})(?<day>\d{2}) - .*$/
85 | puts "skipping #{filename}"
86 | next
87 | end
88 |
89 | destination_folder = "#{destination_root}/#{$~[:year]}/#{"%02d" % $~[:month].to_i} - #{MonthNames[$~[:month].to_i]}"
90 | FileUtils.mkdir_p(destination_folder)
91 | destination_file = "#{destination_folder}/#{filename}"
92 | if File.exist?(destination_file)
93 | if File.binread(file_path) == File.binread(destination_file)
94 | FileUtils.rm(file_path)
95 | next
96 | end
97 |
98 | puts "skipping file with same name, but different than destination: #{filename}"
99 | next
100 | end
101 | FileUtils.mv(file_path, destination_file, verbose: true)
102 | end
103 | end
104 |
105 | if __FILE__ == $PROGRAM_NAME
106 | main(ARGV)
107 | end
108 |
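A sketch of the effect (paths hypothetical): scans named with a leading YYYYMMDD date are filed under YEAR/"MM - MonthName" in the destination root; byte-identical duplicates are deleted, and name clashes with different content are skipped.

    sort_scanned_files ~/scans ~/Documents/archive
    # moves ~/scans/"20180708 - Letter.pdf" to ~/Documents/archive/2018/"07 - July"/"20180708 - Letter.pdf"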
--------------------------------------------------------------------------------
/bin/triage:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | require 'thunder'
4 | require 'fileutils'
5 |
6 | class Triage
7 | class Log
8 | def initialize
9 | @tasks = []
10 | end
11 |
12 | attr_accessor :tasks
13 |
14 | def self.load(file)
15 | log = self.new()
16 | current_task = nil
17 | file.lines.each do |line|
18 | next if line.strip.empty? && current_task.nil?
19 | if line =~ /^(?<date>\d{8}) - (?<subject>.+)$/
20 | current_task = Task.new($~[:date], $~[:subject], "")
21 | log.tasks << current_task
22 | else
23 | current_task.log << "\n" << line.rstrip.sub(/^ /, "")
24 | end
25 | end
26 | return log
27 | end
28 |
29 | def to_s
30 | return @tasks.map(&:to_s).join("\n") + "\n"
31 | end
32 | end
33 |
34 | class Task
35 | def initialize(date, subject, log)
36 | @date = date
37 | @subject = subject
38 | @log = log
39 | end
40 |
41 | attr_accessor :date, :subject, :log
42 |
43 | def to_s
44 | return "#{@date} - #{@subject}\n#{@log.strip.lines.map{|line|" #{line}"}.join()}"
45 | end
46 | end
47 |
48 | class CLI
49 | include Thunder
50 |
51 | def start(*args)
52 | triage_log_path = find_triage_log()
53 | if triage_log_path.nil?
54 | puts "No triage file found...exiting"
55 | exit 1
56 | end
57 | load_triage_log(triage_log_path)
58 |
59 | super
60 | end
61 |
62 | def load_triage_log(filename)
63 | @triage = Log.load(File.read(filename))
64 | end
65 |
66 | def find_triage_log
67 | previous = nil
68 | current = File.expand_path(".")
69 | while true
70 | [
71 | "triage",
72 | ".issues/triage",
73 | ].each do |candidate|
74 | full_path = File.expand_path(candidate, current)
75 | return full_path if File.file?(full_path)
76 | end
77 | previous = current
78 | current = File.dirname(current)
79 | break if current == previous
80 | end
81 | [
82 | "~/triage",
83 | "~/.issues/triage",
84 | ].each do |candidate|
85 | full_path = File.expand_path(candidate)
86 | return full_path if File.exist?(full_path)
87 | end
88 | return nil
89 | end
90 |
91 | desc "edit", "launch an editor for the triage file"
92 | def edit
93 | # prefer VISUAL, falling back on EDITOR
94 | editor = ENV["VISUAL"] || ENV["EDITOR"]
95 | if editor
96 | exec("#{editor} '#{find_triage_log()}'")
97 | else
98 | puts "Cannot determine editor. Please define VISUAL or EDITOR environment variables"
99 | end
100 | end
101 |
102 | desc "what", "show the current issue I'm triaging"
103 | def what
104 | current_task = @triage.tasks.first
105 | if current_task
106 | puts current_task.subject
107 | end
108 | end
109 |
110 | desc "defer [N]", "defer the first N tasks to the end of the list"
111 | def defer(amount = 1)
112 | amount.to_i.times do
113 | @triage.tasks.push(@triage.tasks.shift)
114 | end
115 | File.write(find_triage_log(), @triage)
116 | end
117 |
118 | desc "all"
119 | def all
120 | puts @triage.tasks.map(&:subject)
121 | end
122 |
123 | desc "add SUBJECT", "add a new task to the list"
124 | def add(*subject)
125 | new_task = Task.new(Time.now.strftime("%Y%m%d"), subject.join(" "), "")
126 | @triage.tasks.unshift(new_task)
127 | File.write(find_triage_log(), @triage)
128 | end
129 |
130 | desc "log COMMENT", "record a log line for the current task"
131 | def log(*comment)
132 | current_task = @triage.tasks.first
133 | raise "No current task" unless current_task
134 | current_task.log << "\n" << comment.join(" ")
135 |
136 | File.write(find_triage_log(), @triage)
137 | end
138 |
139 | desc "swap [TO]", "swap the first task with the TOth task in the list"
140 | def swap(with_target = 1)
141 | with_target = with_target.to_i
142 | top_task = @triage.tasks[0]
143 | @triage.tasks[0] = @triage.tasks[with_target]
144 | @triage.tasks[with_target] = top_task
145 | File.write(find_triage_log(), @triage)
146 | end
147 |
148 | desc "resolve", "mark the current task as resolved"
149 | def resolve
150 | triage_file_path = find_triage_log()
151 | archive_file = File.expand_path("../.triage/#{Time.now.strftime("%Y%m%d")}", triage_file_path)
152 | if File.exists?(archive_file)
153 | @resolved = Log.load(File.read(archive_file))
154 | else
155 | @resolved = Log.new
156 | end
157 | task = @triage.tasks.shift
158 | task.log << "\n" << "Marked as resolved at #{Time.now.strftime("%Y%m%d %H:%M:%S")}"
159 | @resolved.tasks.push(task)
160 | File.write(triage_file_path, @triage)
161 | FileUtils.mkdir_p(File.dirname(archive_file))
162 | File.write(archive_file, @resolved)
163 | end
164 | end
165 | end
166 |
167 | Triage::CLI.new.start
168 |
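A sketch of a day's use (task text hypothetical); commands act on the nearest triage or .issues/triage file found by walking up from the current directory:

    triage add fix the flaky integration test
    triage what      # show the task at the top of the list
    triage log reproduced it on CI, suspect a timing issue
    triage defer 2   # push the first two tasks to the back of the queue
    triage resolve   # archive the top task under .triage/YYYYMMDD next to the triage file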
--------------------------------------------------------------------------------
/bin/wifiqr:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # default to printing the current wireless network
4 |
5 | function _qr_code_contents() {
6 | echo "WIFI:T:WPA;S:$1;P:$2;;"
7 | }
8 |
9 | function _find_current_network() {
10 | # Ubuntu typically uses iwgetid
11 | if type -t iwgetid >/dev/null; then
12 | iwgetid -r
13 | return 0
14 | fi
15 |
16 | # Fall back on nm-tool
17 | if type -t nm-tool >/dev/null; then
18 | nm-tool | awk 'f&&/\*/{ssid = substr($1, 2, length($1) - 2); print ssid};/Wireless Access Points \(\* = current AP\)/{f=1}'
19 | return 0
20 | fi
21 |
22 | # Mac uses airport
23 | if [[ -x /System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport ]]; then
24 | /System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport -I | awk '/ SSID/ {print substr($0, index($0, $2))}'
25 | return 0
26 | fi
27 |
28 | echo 'Unable to discover current wireless network' 1>&2
29 | return 1
30 | }
31 |
32 | function _find_network_pass() {
33 | if [[ -f "/etc/NetworkManager/system-connections/$1" ]]; then
34 | sudo cat "/etc/NetworkManager/system-connections/$1" | grep psk= | cut -d= -f 2
35 | return 0
36 | fi
37 |
38 | if type -t security >/dev/null; then
39 | security find-generic-password -wa "$1"
40 | # TODO: check success
41 | return 0
42 | fi
43 |
44 | echo 'Unable to discover current wireless network password' 1>&2
45 | return 1
46 | }
47 |
48 | function show_usage() {
49 | local prog="$(basename "$0")"
50 | cat <<-HELPMESSAGE
51 | $prog SSID PASSWORD # generate a QR code for quickly setting up the network
52 | $prog SSID # same, but look up the saved password for SSID
53 | HELPMESSAGE
54 | }
55 |
56 | function main() {
57 | local network="$1"
58 | case "$network" in
59 | -?|-h|--help|help)
60 | show_usage
61 | exit
62 | ;;
63 | "")
64 | network="$(_find_current_network)"
65 | if [[ $? != 0 ]]; then
66 | exit 1
67 | fi
68 | if [[ -z "$network" ]]; then
69 | echo 'No current network found. Are you connected?'
70 | exit 1
71 | fi
72 | ;;
73 | esac
74 |
75 | local password
76 | if [[ $# -lt 2 ]]; then
77 | password="$(_find_network_pass "$network")"
78 | if [[ $? != 0 ]]; then
79 | exit 1
80 | fi
81 | else
82 | password="$2"
83 | fi
84 |
85 | _qr_code_contents "$network" "$password" | qrencode -t UTF8
86 | }
87 |
88 | main "$@"
89 |
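A sketch (SSID and password hypothetical); qrencode must be installed, since the WIFI: payload is piped into it:

    wifiqr                                            # current network, password looked up from the system
    wifiqr "GuestNet" "correct horse battery staple"  # explicit network and password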
--------------------------------------------------------------------------------
/githooks/dispatching_hook:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This is a generic hook that is meant to invoke multiple hooks,
4 | # forwarding both arguments and stdin, and stopping at the first hook that fails
5 |
6 | # This makes it easy to write hooks that do exactly one thing
7 |
8 | HOOK_NAME="$(basename "${BASH_SOURCE[0]}")"
9 |
10 | GIT_DIR="$(git rev-parse --git-dir)"
11 | HOOKS="$GIT_DIR/hooks/$HOOK_NAME.d"
12 | if [[ -d "$HOOKS" ]]; then
13 | stdin="$(cat; echo x)"
14 | for HOOK in "$HOOKS"/*; do
15 | if [[ -f "$HOOK" && -x "$HOOK" ]]; then
16 | printf "%s" "${stdin%x}" | "$HOOK" "$@" || exit $?
17 | fi
18 | done
19 | fi
20 |
21 | exit 0
22 |
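The layout it expects, sketched for a pre-push hook: the real hook is a symlink to dispatching_hook, and the single-purpose hooks live in a sibling .d directory (the notify_ci entry is hypothetical):

    .git/hooks/pre-push       -> dispatching_hook
    .git/hooks/pre-push.d/
        refuse_WIP_commits    # executable; receives the same arguments and stdin
        notify_ci             # any non-zero exit stops the chain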
--------------------------------------------------------------------------------
/githooks/install_hooks:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script installs all the hooks that live alongside it into the hooks directory of the containing git repo
4 | #
5 | # The intention is that this script is copied from Steven Karas's bashfiles repo (https://github.com/stevenkaras/bashfiles)
6 | # into your project. Please leave this notice in the file so you can find the most up to date version of this script.
7 |
8 | shopt -s extglob
9 |
10 | HOOKS_DIR="$(git rev-parse --git-dir)/hooks"
11 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
12 |
13 | function remove_broken_symlinks() {
14 | local ROOTDIR
15 | ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
16 | local target="$1"
17 |
18 | for file in "$target/"*; do
19 | if [[ -h "$file" && ! -e "$file" ]]; then
20 | local symlink_target
21 | symlink_target="$(readlink -n "$file")"
22 | [[ "$symlink_target" = "$ROOTDIR"/* ]] && rm "$file"
23 | fi
24 | done
25 | }
26 |
27 | # install the given hook
28 | function install_hook() {
29 | local hook_name="$1"
30 | local hook_path="$HOOKS_DIR/${hook_name%%.*}"
31 | if [[ -e "$hook_path" ]]; then
32 | return 0
33 | fi
34 | if [[ -e "$DIR/$hook_name" ]]; then
35 | ln -snfT "$DIR/$hook_name" "$hook_path"
36 | fi
37 | if [[ -d "$DIR/$hook_name" ]]; then
38 | ln -snfT "$DIR/$hook_name" "$hook_path.d"
39 | ln -snfT dispatching_hook "$hook_path"
40 | fi
41 | }
42 |
43 | # remove any broken hooks
44 | remove_broken_symlinks "$HOOKS_DIR"
45 |
46 | for hook in "$DIR"/!("$(basename "${BASH_SOURCE[0]}")"); do
47 | install_hook "$(basename "$hook")"
48 | done
49 |
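A sketch of the intended workflow (paths hypothetical): copy the githooks directory into a project and run the installer from inside the repo; it symlinks each hook (or hook directory plus dispatching_hook) into .git/hooks and prunes links left over from removed hooks.

    cp -r ~/bashfiles/githooks ~/src/myproject/githooks
    cd ~/src/myproject && ./githooks/install_hooks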
--------------------------------------------------------------------------------
/githooks/post-command-pull.sync_repos:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # see the documentation in the pre-command-pull.sync_repos file
4 |
5 | SYNC_REMOTE=sync-mirrors
6 | lock_server="$(git config remote."${SYNC_REMOTE}".lock-server)"
7 | lock_name="$(git config remote."${SYNC_REMOTE}".lock-name)"
8 |
9 | if [[ -z "$lock_server" || -z "$lock_name" ]]; then
10 | exit 0
11 | fi
12 |
13 | # push to all the synced repos
14 | if ! git push --mirror sync-mirrors; then
15 | >&2 printf "Failed to push to all synced repos; not releasing lock\n"
16 | >&2 printf "Once the failure has been fixed, release the lock with:\n"
17 | >&2 printf "ssh %s \"bin/fs_advisory_lock unlock \\\"\${TMPDIR:-/tmp}/%s\\\".mppush\"\n" "$lock_server" "$lock_name"
18 | exit 1
19 | fi
20 |
21 | # shellcheck disable=SC2029
22 | if ! ssh "$lock_server" "bin/fs_advisory_lock unlock \"\${TMPDIR:-/tmp}/$lock_name\".mppush"; then
23 | >&2 printf "Failed to release lock. Check who is holding the lock at %s:\${TMPDIR:-/tmp}/%s.mppush\n" "$lock_server" "$lock_name"
24 | exit 1
25 | fi
26 |
27 | # we've finished the pull
28 | exit 0
29 |
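The lock settings this hook reads are plain git config entries on the sync-mirrors remote; a sketch of the setup (host and name hypothetical). The matching pre-command-pull hook takes the lock before pulling; this one pushes the mirrors and then releases it over ssh.

    git config remote.sync-mirrors.lock-server git.example.com
    git config remote.sync-mirrors.lock-name myproject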
--------------------------------------------------------------------------------
/githooks/post-receive.async_mirror:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # post-receive githook for asynchronous mirroring
4 | # This means that the content is pushed later.
5 | # It also means failures are not displayed to the user
6 | # You may want to consider using the sync hook if a delay or temporary failure is unacceptable
7 |
8 | # The assumption is that there is a remote called "async-mirrors" that includes all the mirrors to push to asynchronously.
9 | # if there is no such remote, this hook does nothing
10 |
11 | REPO_NAME="TBD"
12 |
13 | mirror_push_url="$(git remote get-url --push async-mirrors 2>&1)"
14 | if [[ $? == 0 && -n "$mirror_push_url" ]]; then
15 | cat <<-BASH | at now
16 | fs_advisory_lock lock "$TMPDIR/$REPO_NAME"
17 | git push --mirror async-mirrors
18 | fs_advisory_lock unlock "$TMPDIR/$REPO_NAME"
19 | BASH
20 | fi
21 |
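A sketch of the remote this hook expects (URLs hypothetical): one fetch URL plus one push URL per mirror is enough for git push --mirror async-mirrors.

    git remote add async-mirrors git@mirror1.example.com:repo.git
    git remote set-url --add --push async-mirrors git@mirror1.example.com:repo.git
    git remote set-url --add --push async-mirrors git@mirror2.example.com:repo.git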
--------------------------------------------------------------------------------
/githooks/post-receive.multiprimary:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # post-receive githook for multiprimary mirroring
4 |
5 | if [[ "$GIT_PUSH_OPTION_COUNT" == 1 && "$GIT_PUSH_OPTION_0" == "mirror" ]]; then
6 | exit 0
7 | fi
8 |
9 | git push --mirror primaries -o mirror
10 |
11 | # release the lock
12 | PRIMARY_SERVER="TBD"
13 | REPO_NAME="TBD"
14 |
15 | ssh "$PRIMARY_SERVER" "bin/fs_advisory_lock unlock \"\${TMPDIR:-/tmp}/$REPO_NAME\".mppush"
16 |
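A sketch of the multiprimary wiring (server names hypothetical): each primary carries a primaries remote pointing at its peers, and the forwarded push uses -o mirror so the peers' hooks can break the loop; push options have to be enabled on the receiving repositories.

    # on primary A
    git remote add primaries git@primary-b.example.com:repo.git
    git config receive.advertisePushOptions true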
--------------------------------------------------------------------------------
/githooks/post-receive.sync_mirror:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # post-receive githook for synchronous mirroring
4 | # This means that you will see the result of the mirroring while pushing your changes.
5 | # You may want to consider using the async hook if a delay or temporary failure is acceptable
6 |
7 | # The assumption is that there is a remote called "sync-mirrors" that includes all the mirrors to push to synchronously.
8 | # if there is no such remote, this hook does nothing
9 |
10 | mirror_push_url="$(git remote get-url --push sync-mirrors 2>&1)"
11 | if [[ $? == 0 && -n "$mirror_push_url" ]]; then
12 | git push --mirror sync-mirrors
13 | fi
14 |
--------------------------------------------------------------------------------
/githooks/post-svn-dcommit:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | git stash pop # stashed in pre-svn-dcommit hook
4 |
--------------------------------------------------------------------------------
/githooks/post-svn-rebase:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | git stash pop # stashed in pre-svn-rebase hook
4 |
--------------------------------------------------------------------------------
/githooks/pre-command-commit.refuse_shared_author:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | author_info="$(git var GIT_AUTHOR_IDENT)"
4 | if [[ "$author_info" != *"SHOULD NOT BE USED"* ]]; then
5 | # only snark for shared authors
6 | exit 0
7 | fi
8 |
9 | # search $@ for --author
10 | for arg in "$@"; do
11 | case "$arg" in
12 | --author*)
13 | exit 0
14 | ;;
15 | esac
16 | done
17 |
18 | cat >&2 <
--------------------------------------------------------------------------------
/githooks/pre-command-pull.sync_repos:
--------------------------------------------------------------------------------
23 | if ! ssh "$lock_server" "bin/fs_advisory_lock trylock \"\${TMPDIR:-/tmp}/$lock_name\".mppush"; then
24 | >&2 printf "Failed to acquire lock. Check who is holding the lock at %s:\${TMPDIR:-/tmp}/%s.mppush\n" "$lock_server" "$lock_name"
25 | exit 1
26 | fi
27 |
28 | # do the pull
29 | exit 0
30 |
--------------------------------------------------------------------------------
/githooks/pre-command-pull.track_branch:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | current_branch="$(git rev-parse --symbolic --abbrev-ref HEAD)"
4 | current_remote="$(git config branch.${current_branch}.remote)"
5 |
6 | tracking_branch="$(git show ${current_branch}:.gittracking 2>/dev/null)"
7 | if [[ $? != 0 ]]; then
8 | tracking_branch="$(git config branch.${current_branch}.autotracking)"
9 | fi
10 |
11 | if [[ -z "$tracking_branch" ]]; then
12 | exit 0
13 | fi
14 |
15 | if [[ "$#" -gt 0 ]]; then
16 | cat <
--------------------------------------------------------------------------------
/githooks/pre-push.refuse_WIP_commits:
--------------------------------------------------------------------------------
17 | #   <local ref> <local sha1> <remote ref> <remote sha1>
18 | #
19 | # This sample shows how to prevent push of commits where the log message starts
20 | # with "WIP" (work in progress).
21 |
22 | remote="$1"
23 | url="$2"
24 | current_branch=`git rev-parse --symbolic --abbrev-ref HEAD`
25 |
26 | z40=0000000000000000000000000000000000000000
27 |
28 | while read local_ref local_sha remote_ref remote_sha
29 | do
30 | if [ "$local_sha" = $z40 ]
31 | then
32 | # Handle delete
33 | :
34 | else
35 | if [ "$remote_sha" = $z40 ]
36 | then
37 | # New branch, examine all commits
38 | range="$local_sha"
39 | else
40 | # Update to existing branch, examine new commits
41 | range="$remote_sha..$local_sha"
42 | fi
43 |
44 | if [ "$remote" = "origin" ]
45 | then
46 | # Check for WIP commit
47 | commit=`git rev-list -n 1 --grep '^WIP' "$range" --not --remotes`
48 | if [ -n "$commit" ]
49 | then
50 | echo >&2 "Found WIP commit in $local_ref, not pushing"
51 | exit 1
52 | fi
53 | fi
54 | fi
55 | done
56 |
57 | exit 0
58 |
--------------------------------------------------------------------------------
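A usage sketch for the WIP hook above, assuming it is linked directly as .git/hooks/pre-push (commit messages are illustrative); note the check only fires for pushes to the origin remote and only for commits not already on a remote:

    ln -s ~/bashfiles/githooks/pre-push.refuse_WIP_commits .git/hooks/pre-push
    git commit --allow-empty -m "WIP: try a different cache layout"
    git push origin HEAD    # refused: "Found WIP commit in refs/heads/<branch>, not pushing"
    git commit --amend -m "cache: switch to a two-level layout"
    git push origin HEAD    # accepted
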
/githooks/pre-receive.multiprimary:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # pre-receive githook for multiprimary mirroring
4 |
5 | if [[ "$GIT_PUSH_OPTION_COUNT" == 1 && "$GIT_PUSH_OPTION_0" == "mirror" ]]; then
6 | exit 0
7 | fi
8 |
9 | # acquire the push lock
10 | PRIMARY_SERVER="TBD"
11 | REPO_NAME="TBD"
12 |
13 | if ! ssh "$PRIMARY_SERVER" "bin/fs_advisory_lock trylock \"\${TMPDIR:-/tmp}/$REPO_NAME\".mppush"; then
14 | echo "failed to acquire push lock"
15 | exit 1
16 | fi
17 |
18 | :
19 |
--------------------------------------------------------------------------------
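The pre-receive hook above takes the push lock and post-receive.multiprimary releases it. A hedged sketch of that pairing with bin/fs_advisory_lock, whose trylock/unlock interface is inferred only from these hooks (myrepo is a placeholder repository name, and ~/bin is assumed to be on PATH):

    lock_path="${TMPDIR:-/tmp}/myrepo.mppush"
    if fs_advisory_lock trylock "$lock_path"; then
        # ... replicate the push to the other primaries ...
        fs_advisory_lock unlock "$lock_path"
    else
        >&2 echo "another push is in flight; try again shortly"
    fi
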
/githooks/pre-svn-dcommit:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | git stash -u
4 |
5 | if ! git svn rebase; then
6 | echo "WARNING: rebase failed"
7 | exit 1;
8 | fi
9 |
10 | exit 0
11 |
--------------------------------------------------------------------------------
/githooks/pre-svn-rebase:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | git stash -u
4 |
--------------------------------------------------------------------------------
/install_bashfiles.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function _platform() {
4 | local kernel
5 | kernel="$(uname -s)"
6 | case "$kernel" in
7 | Linux)
8 | echo "ubuntu"
9 | ;;
10 | Darwin)
11 | echo "macos"
12 | ;;
13 | esac
14 | }
15 |
16 | function do_ipython_install() {
17 | # determine the folder this script is in
18 | local ROOTDIR
19 | ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
20 | local IPYTHON_PROFILE_DIR
21 | IPYTHON_PROFILE_DIR="$(ipython locate profile)"
22 | # shellcheck disable=SC2181
23 | if [[ $? != 0 ]]; then
24 | return
25 | fi
26 |
27 | for ipython_config_file in "$ROOTDIR/.ipython/"*; do
28 | ln -s -n "$ipython_config_file" "$IPYTHON_PROFILE_DIR/$(basename "$ipython_config_file")" 2>/dev/null
29 | done
30 | }
31 |
32 | function remove_broken_symlinks() {
33 | local ROOTDIR
34 | ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
35 | local target="$1"
36 |
37 | shopt -s dotglob
38 | for file in "$target/"*; do
39 | if [[ -h "$file" && ! -e "$file" ]]; then
40 | local symlink_target
41 | symlink_target="$(readlink -n "$file")"
42 | [[ "$symlink_target" = "$ROOTDIR"/* ]] && rm "$file"
43 | fi
44 | done
45 | }
46 |
47 | function do_install() {
48 | # if we're being piped into bash, then clone
49 | if [[ ! -t 0 && "$0" == "bash" && -z "${BASH_SOURCE[0]}" ]]; then
50 | if [[ -e "$HOME/bashfiles" ]]; then
51 | echo "$HOME/bashfiles already exists. Perhaps you meant to run $HOME/bashfiles/install_bashfiles.bash?"
52 | return 1
53 | fi
54 | git clone https://github.com/stevenkaras/bashfiles.git "$HOME/bashfiles" || return $?
55 | "$HOME"/bashfiles/install_bashfiles.bash
56 | return $?
57 | fi
58 |
59 | # determine the folder this script is in
60 | local ROOTDIR
61 | ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
62 |
63 | remove_broken_symlinks "$HOME"
64 | for bashfile in "$ROOTDIR"/.bash*; do
65 | local bashfilename
66 | bashfilename="$(basename "$bashfile")"
67 | # don't accidentally create recursive symlinks
68 | if [[ ! -h "$bashfile" ]]; then
69 | ln -s -n "$bashfile" "$HOME/$bashfilename" 2>/dev/null
70 | fi
71 | done
72 | local platform
73 | platform=$(_platform)
74 | # inject the bashfiles
75 | if ! grep -E "$HOME/.bashrc" -e "(\\.|source)\\s+('|\")?($HOME|\\\$HOME)/.bashlib" >/dev/null; then
76 | cat <<-BASH >> "$HOME/.bashrc"
77 | if [[ -f "\$HOME/.bashrc_$platform" ]]; then
78 | . "\$HOME/.bashrc_$platform"
79 | fi
80 |
81 | if [[ -f "\$HOME/.bashlib" ]]; then
82 | . "\$HOME/.bashlib"
83 | fi
84 |
85 | BASH
86 | fi
87 |
88 | # warn about problematic history declarations in .bashrc, /etc/bash.bashrc, etc
89 | for bashfile in "$HOME/.bash_profile" "$HOME/.profile" "$HOME/.bashrc" "/etc/bash.bashrc" "/etc/bashrc"; do
90 | if [[ -f "$bashfile" ]]; then
91 | if grep -e '^[^#]*HISTSIZE=[0-9]' "$bashfile"; then
92 | echo "WARNING: $bashfile sets HISTSIZE. This is known to truncate history files even though we set it to unlimited"
93 | fi
94 | fi
95 | done
96 |
97 | # Setup binary links
98 | mkdir -p "$HOME/bin"
99 | remove_broken_symlinks "$HOME/bin"
100 | for binary in "$ROOTDIR"/bin/*; do
101 | ln -s -n "$binary" "$HOME/bin/$(basename "$binary")" 2>/dev/null
102 | done
103 | for ssh_binary in "$ROOTDIR"/ssh/*.bash; do
104 | ln -s -n "$ssh_binary" "$HOME/bin/$(basename "${ssh_binary%%.bash}")" 2>/dev/null
105 | done
106 |
107 | # other files to symlink
108 | for otherfile in .agignore .tmux.conf .tmux_profile .vim .irbrc .psqlrc .lessfilter .inputrc; do
109 | ln -s -n "$ROOTDIR/$otherfile" "$HOME/$otherfile" 2>/dev/null
110 | done
111 |
112 | # ipython config installation
113 | if type -t ipython >/dev/null; then
114 | do_ipython_install
115 | fi
116 |
117 | # Migrate over some stuff to XDG style
118 | [[ -z "$XDG_CONFIG_HOME" ]] && export XDG_CONFIG_HOME="$HOME/.config"
119 |
120 | # git: To be removed no earlier than 20190601
121 | mkdir -p "$XDG_CONFIG_HOME/git"
122 | [[ -e "$HOME/.gitconfig" ]] && mv "$HOME/.gitconfig" "$XDG_CONFIG_HOME/git/config"
123 | # shellcheck disable=SC2088
124 | [[ ! -e "$HOME/.gitignore_global" && "$(git config --global core.excludesfile)" == "~/.gitignore_global" ]] && git config --global --unset core.excludesfile
125 |
126 | # symlink XDG configs
127 | remove_broken_symlinks "$XDG_CONFIG_HOME"
128 | for config_entry in "$ROOTDIR/.config"/*; do
129 | if [[ -d "$config_entry" ]]; then
130 | mkdir -p "$XDG_CONFIG_HOME/$(basename "$config_entry")"
131 | remove_broken_symlinks "$XDG_CONFIG_HOME/$(basename "$config_entry")"
132 | for config_file in "$config_entry"/*; do
133 | ln -s -n "$config_file" "$XDG_CONFIG_HOME/$(basename "$config_entry")/$(basename "$config_file")" 2>/dev/null
134 | done
135 | elif [[ -f "$config_entry" ]]; then
136 | ln -s -n "$config_entry" "$XDG_CONFIG_HOME/$(basename "$config_entry")" 2>/dev/null
137 | fi
138 | done
139 | ln -s -n "$XDG_CONFIG_HOME" "$ROOTDIR/.config-real" 2>/dev/null # convenience symlink
140 |
141 | # copy over templates
142 | [[ ! -e "$HOME/.bash_features" ]] && cp "$ROOTDIR/templates/.bash_features" "$HOME/.bash_features"
143 | ln -s -n "$HOME/.bash_features" "$ROOTDIR/.bash_features" 2>/dev/null # convenience symlink
144 | for config_folder in "$ROOTDIR/templates/.config"/*; do
145 | mkdir -p "$XDG_CONFIG_HOME/$(basename "$config_folder")"
146 | for config_file in "$config_folder"/*; do
147 | if [[ ! -e "$XDG_CONFIG_HOME/$(basename "$config_folder")/$(basename "$config_file")" ]]; then
148 | cp "$config_file" "$XDG_CONFIG_HOME/$(basename "$config_folder")/$(basename "$config_file")"
149 | fi
150 | done
151 | done
152 |
153 | # symlink the local profile files into the repo for convenience
154 | [[ -f "$HOME/.profile" ]] && ln -s -n "$HOME/.profile" "$ROOTDIR/.profile" 2>/dev/null
155 | [[ -f "$HOME/.bash_profile" ]] && ln -s -n "$HOME/.bash_profile" "$ROOTDIR/.bash_profile" 2>/dev/null
156 | [[ -f "$HOME/.bashrc" ]] && ln -s -n "$HOME/.bashrc" "$ROOTDIR/.bashrc" 2>/dev/null
157 | [[ -f "$HOME/.bash_features" ]] && ln -s -n "$HOME/.bash_features" "$ROOTDIR/.bash_features" 2>/dev/null
158 |
159 | return 0
160 | }
161 |
162 | do_install "$@"
163 |
--------------------------------------------------------------------------------
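One way to run the installer above when not piping it into bash (the clone URL comes from the script itself):

    git clone https://github.com/stevenkaras/bashfiles.git "$HOME/bashfiles"
    "$HOME/bashfiles/install_bashfiles.bash"
    exec bash    # restart the shell so the lines appended to ~/.bashrc take effect
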
/ssh/list_authorized_keys.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function list_authorized_keys() {
4 | local authorized_keys="${1:-$HOME/.ssh/authorized_keys}"
5 | if [[ ! -f "$authorized_keys" ]]; then
6 | echo "USAGE: list_authorized_keys [FILE]"
7 | return 1
8 | fi
9 | echo "listing keys from $authorized_keys:"
10 |
11 | local -a keys
12 | mapfile keys < "$authorized_keys"
13 | local options
14 | local keytype
15 | local key
16 | local comment
17 | local is_ca_key
18 | local tmpfile="$(mktemp)"
19 |
20 | for key in "${keys[@]}"; do
21 | key="${key:0:-1}"
22 | if [[ "$key" = "#"* || -z "$key" ]]; then
23 | continue
24 | fi
25 |
26 | # parse the key line into [OPTIONS] KEYTYPE KEY COMMENT...
27 | options="${key%% *}"
28 | key="${key#* }"
29 | case "$options" in
30 | # valid key types taken from man 8 sshd (AUTHORIZED_KEYS section)
31 | ecdsa-sha2-nistp256|ecdsa-sha2-nistp384|ecdsa-sha2-nistp521|ssh-ed25519|ssh-dss|ssh-rsa)
32 | keytype="$options"
33 | options=""
34 | ;;
35 | *)
36 | keytype="${key%% *}"
37 | key="${key#* }"
38 | ;;
39 | esac
40 | comment="${key#* }"
41 | key="${key%% *}"
42 |
43 | #TODO: improve options parsing (this may pick up the content of a ENVVAR or command)
44 | if [[ "$options" = *cert-authority* ]]; then
45 | is_ca_key="true"
46 | else
47 | is_ca_key=""
48 | fi
49 |
50 | # SSH requires the use of a file, and is incapable of working off stdin, so write to a tmpfile
51 | printf "%s %s %s" "$keytype" "$key" "$comment" > "$tmpfile"
52 |
53 | if [[ -n "$is_ca_key" ]]; then
54 | echo -n 'CA: '
55 | fi
56 | if [[ -n "$key" && ${key###} = "$key" ]]; then
57 | ssh-keygen -l -f "$tmpfile"
58 | fi
59 | done
60 | rm "$tmpfile"
61 |
62 | return 0
63 | }
64 |
65 | list_authorized_keys "$@"
66 |
--------------------------------------------------------------------------------
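For reference, the two authorized_keys line shapes the parser above distinguishes (key material elided, names are placeholders; cert-authority entries are printed with a "CA: " prefix), followed by example invocations:

    # KEYTYPE KEY COMMENT
    ssh-ed25519 AAAA...elided... alice@laptop
    # OPTIONS KEYTYPE KEY COMMENT
    cert-authority,principals="deploy" ssh-ed25519 AAAA...elided... team-ca

    list_authorized_keys                            # defaults to ~/.ssh/authorized_keys
    list_authorized_keys /path/to/authorized_keys   # or any other key file
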
/ssh/rotate_all_keys.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function rotate_all_keys() {
4 | for pubkey in $HOME/.ssh/*.id_rsa.pub; do
5 | local keyfile="${pubkey##*/}"
6 | local keyname="${keyfile%*.id_rsa.pub}"
7 | local username="${keyname%@*}"
8 | local remote_host="${keyname#*@}"
9 | # skip known problematic keys (github, etc)
10 | case "$remote_host" in
11 | github.com )
12 | continue
13 | ;;
14 | esac
15 | rotate_ssh_key "$username@$remote_host"
16 | done
17 | }
18 |
19 | rotate_all_keys "$@"
20 |
--------------------------------------------------------------------------------
/ssh/rotate_ssh_key.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function rotate_ssh_key() {
4 | local username="${1%@*}"
5 | if [[ "$username" == "$1" ]]; then
6 | username="$(whoami)"
7 | fi
8 | local server="${1##*@}"
9 | local port="${server##*:}"
10 | if [[ "$port" == "$server" ]]; then
11 | port="${2:-22}"
12 | else
13 | server="${server%:*}"
14 | fi
15 | echo "Rotating key on $username@$server:$port"
16 |
17 | local id_file="$username@$server.id_rsa"
18 | local key_to_revoke="$(ssh-keygen -l -f "$HOME/.ssh/$id_file" | cut -d ' ' -f2)"
19 | mkdir -p "$HOME/.ssh/archive"
20 | local archive_id_file="$(date +%Y%m%d-%H%M%S)-$id_file"
21 | mv "$HOME/.ssh/$id_file" "$HOME/.ssh/archive/$archive_id_file"
22 | mv "$HOME/.ssh/$id_file.pub" "$HOME/.ssh/archive/$archive_id_file.pub"
23 | # ssh into the server and revoke the specific key we're rotating
24 | ssh-keygen -t rsa -b 4096 -C "$(hostname)@$server <$USER_EMAIL>" -f "$HOME/.ssh/$id_file"
25 | local new_key="$(cat "$HOME/.ssh/$id_file.pub")"
26 | ssh -p "$port" -i "$HOME/.ssh/archive/$archive_id_file" "$username@$server" "tee -a \$HOME/.ssh/authorized_keys <<<\"$new_key\" >/dev/null; $(typeset -f revoke_key); revoke_key $key_to_revoke"
27 | ssh-add "$HOME/.ssh/$id_file"
28 | }
29 |
30 | function revoke_key() {
31 | local key_to_revoke="$1"
32 |
33 | # revokes the key with the given fingerprint from the authorized_keys file
34 | local authorized_keys="$HOME/.ssh/authorized_keys"
35 |
36 | while read -r line; do
37 | if [[ -n "$line" && ${line###} == "$line" ]]; then
38 | local fingerprint="$(ssh-keygen -l -f /dev/stdin <<<"$line" | cut -d ' ' -f2)"
39 | if [[ "$fingerprint" != "$key_to_revoke" ]]; then
40 | echo "$line"
41 | fi
42 | else
43 | echo "$line"
44 | fi
45 | done < "$authorized_keys" > "$authorized_keys.new"
46 | mkdir -p "$HOME/.ssh/archive"
47 | cp "$authorized_keys" "$HOME/.ssh/archive/$(date +%Y%m%d-%H%M%S)-authorized_keys"
48 | mv "$authorized_keys.new" "$authorized_keys"
49 | }
50 |
51 | rotate_ssh_key "$@"
52 |
--------------------------------------------------------------------------------
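A hedged usage sketch for the rotation script above (user, host, and email are placeholders). It relies on the ~/.ssh/USER@HOST.id_rsa naming convention that setup_ssh_server below establishes, archives the old keypair under ~/.ssh/archive/, and uses USER_EMAIL in the new key's comment:

    export USER_EMAIL="you@example.com"
    rotate_ssh_key deploy@app1.example.com          # defaults to port 22
    rotate_ssh_key deploy@app1.example.com:2222     # explicit port (a second argument also works)
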
/ssh/setup_ssh_server.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # create a new ssh keypair, setup the server in the ssh config,
4 | # and push the new key into the authorized keys for that server
5 | #
6 | # Roughly equivalent to:
7 | # ssh-keygen -t rsa -b 4096 -C "$(hostname)@server <$USER_EMAIL>" -f ~/.ssh/user@server.id_rsa
8 | # cat ~/.ssh/user@server.id_rsa.pub | ssh user@server "cat >> \$HOME/.ssh/authorized_keys"
9 |
10 | function setup_ssh_server() {
11 | local username="${1%@*}"
12 | if [[ "$username" == "$1" ]]; then
13 | username="$(whoami)"
14 | fi
15 | local server="${1##*@}"
16 | local port="${server##*:}"
17 | if [[ "$port" == "$server" ]]; then
18 | port="${2:-22}"
19 | else
20 | server="${server%:*}"
21 | fi
22 | echo "Setting up $username@$server:$port"
23 |
24 | tee -a "$HOME/.ssh/config" <<SSH_CONFIG >/dev/null
25 |
26 | Host $server
27 | HostName $server
28 | Port $port
29 | User $username
30 | SSH_CONFIG
31 | local id_file="$username@$server.id_rsa"
32 | ssh-keygen -t rsa -b 4096 -C "$(hostname)@$server <$USER_EMAIL>" -f "$HOME/.ssh/$id_file"
33 | local new_key="$(cat "$HOME/.ssh/$id_file.pub")"
34 | ssh -p $port -i "$HOME/.ssh/$id_file" "$username@$server" "tee -a \$HOME/.ssh/authorized_keys <<<\"$new_key\" >/dev/null"
35 | }
36 |
37 | setup_ssh_server "$@"
38 |
--------------------------------------------------------------------------------
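A hedged usage sketch for the setup script above (user, host, and email are placeholders). It appends a Host block to ~/.ssh/config, generates ~/.ssh/USER@HOST.id_rsa, and installs the public key on the server:

    export USER_EMAIL="you@example.com"             # embedded in the generated key's comment
    setup_ssh_server deploy@app1.example.com:2222
    ssh -i ~/.ssh/deploy@app1.example.com.id_rsa app1.example.com   # the Host block supplies Port and User
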
/ssh/ssh-acme.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function autosign() {
4 | local tmpfile
5 | tmpfile="$(mktemp)" # for printing errors
6 | [[ $? != 0 ]] && return 1
7 |
8 | local key_to_sign="$1"
9 | local cert_path="${key_to_sign/%.pub/-cert.pub}"
10 | # remove any previous certificate
11 | [[ -e "$cert_path" ]] && rm "$cert_path"
12 |
13 | # clock drift happens, so sign certs to be valid 5 minutes in the past
14 | local validity_period="${2:--5m:+1w}"
15 |
16 | SSHCA_ROOT="$HOME/.ssh/ca" "$HOME/bin/ssh-ca" sign "$key_to_sign" -V "$validity_period" >"$tmpfile" 2>&1
17 | local exit_code="$?"
18 | if [[ "$exit_code" != 0 ]]; then
19 | cat "$tmpfile"
20 | rm "$tmpfile"
21 | return "$exit_code"
22 | fi
23 | rm "$tmpfile"
24 |
25 | cat "$cert_path"
26 | }
27 |
28 | function trust() {
29 | local ACME_ROOT="$SSHCA_ROOT/acme"
30 | mkdir -p "$SSHCA_ROOT/acme"
31 | cp "$1" "$ACME_ROOT"
32 | local key_path="$ACME_ROOT/${1##*/}"
33 |
34 | local fingerprint
35 | fingerprint="$(ssh-keygen -l -f "$key_path")"
36 | local exit_code="$?"
37 | if [[ "$exit_code" != 0 ]]; then
38 | echo "$1 is not a public SSH keyfile"
39 | rm "$key_path"
40 | return "$exit_code"
41 | fi
42 |
43 | local authorized_keys_prefix="command=\"env -i \$HOME/bin/ssh-acme autosign $key_path\""
44 | # the ssh-acme/ssh-ca scripts require a pty...so we can't set no-pty
45 | local authorized_keys_options=',no-agent-forwarding,no-port-forwarding,no-user-rc,no-X11-forwarding'
46 | local authorized_keys_stanza
47 | authorized_keys_stanza="${authorized_keys_prefix}${authorized_keys_options} $(cat "$key_path")"
48 | echo "$authorized_keys_stanza" >> "$HOME/.ssh/authorized_keys"
49 | echo "$(date -u +%FT%T%z):acme-trust: $fingerprint" >> "$SSHCA_ROOT/audit.log"
50 | echo "Trusted $fingerprint to be automatically issued certificates"
51 | }
52 |
53 | function revoke() {
54 | local tmpfile
55 | tmpfile="$(mktemp)"
56 | [[ $? != 0 ]] && return 1
57 |
58 | local ACME_ROOT="$SSHCA_ROOT/acme"
59 | mkdir -p "$SSHCA_ROOT/acme"
60 | local key_to_revoke="$1"
61 | local fingerprint="$1"
62 | if [[ -e "$key_to_revoke" ]]; then
63 | fingerprint="$(ssh-keygen -l -f "$key_to_revoke")"
64 | local exit_code="$?"
65 | if [[ "$exit_code" != 0 ]]; then
66 | echo "$key_to_revoke is not a public SSH keyfile"
67 | return "$exit_code"
68 | fi
69 | fingerprint="$(echo "$fingerprint" | cut -d' ' -f2)"
70 | fi
71 |
72 | # remove the key with the fingerprint from the authorized keys
73 | local authorized_keys="$HOME/.ssh/authorized_keys"
74 | local options
75 | local keytype
76 | local key
77 | local comment
78 |
79 | while read -r line; do
80 | if [[ "$line" = "#"* || -z "$line" ]]; then
81 | echo "$line"
82 | continue
83 | fi
84 |
85 | # parse the key line into [OPTIONS] KEYTYPE KEY COMMENT...
86 | key="$line"
87 | options="${key%% *}"
88 | key="${key#* }"
89 | case "$options" in
90 | # valid key types taken from man 8 sshd (AUTHORIZED_KEYS section)
91 | ecdsa-sha2-nistp256|ecdsa-sha2-nistp384|ecdsa-sha2-nistp521|ssh-ed25519|ssh-dss|ssh-rsa)
92 | keytype="$options"
93 | options=""
94 | ;;
95 | *)
96 | keytype="${key%% *}"
97 | key="${key#* }"
98 | ;;
99 | esac
100 | comment="${key#* }"
101 | key="${key%% *}"
102 |
103 | # SSH requires the use of a file, and is incapable of working off stdin, so write to a tmpfile
104 | printf "%s %s %s" "$keytype" "$key" "$comment" > "$tmpfile"
105 | local stored_fingerprint
106 | stored_fingerprint="$(ssh-keygen -l -f "$tmpfile" | cut -d' ' -f2)"
107 | if [[ "$stored_fingerprint" != "$fingerprint" ]]; then
108 | echo "$line"
109 | fi
110 | done < "$authorized_keys" > "$authorized_keys.new"
111 | mv "$authorized_keys.new" "$authorized_keys"
112 |
113 | # delete the stored keyfile
114 | for stored_keyfile in "$ACME_ROOT/"*; do
115 | local stored_fingerprint
116 | stored_fingerprint="$(ssh-keygen -l -f "$stored_keyfile" | cut -d' ' -f2)"
117 | [[ "$stored_fingerprint" == "$fingerprint" ]] && rm "$stored_keyfile"
118 | done
119 | echo "$(date -u +%FT%T%z):acme-revoke: $fingerprint" >> "$SSHCA_ROOT/audit.log"
120 | echo "$fingerprint has been revoked and will not be issued any new certificates"
121 | }
122 |
123 | function find_ca_root() {
124 | if [[ -n "$SSHCA_ROOT" ]]; then
125 | return 0
126 | fi
127 |
128 | local default="$HOME/.ssh/ca"
129 | if [[ -d "$default" ]]; then
130 | export SSHCA_ROOT="$default"
131 | return 0
132 | fi
133 |
134 | echo "SSH CA not set up"
135 | return 1
136 | }
137 |
138 | function show_usage() {
139 | local prog="${0##*/}"
140 | cat <<-HELPMESSAGE
141 | $prog trust KEYFILE # Trust a key to be issued certificates automatically
142 | $prog trusthost KEYFILE # Trust a key to be issued host certificates automatically
143 | $prog revoke [KEYFILE|FINGERPRINT] # Revoke a key, so it cannot be issued any more certificates
144 | HELPMESSAGE
145 | if [[ "$1" == "-v" || "$1" == "--verbose" ]]; then
146 | cat <<-VERBOSEHELP
147 |
148 | $prog, along with ssh-ca, allows you to automatically issue SSH certificates.
149 | VERBOSEHELP
150 | fi
151 | }
152 |
153 | function main() {
154 | local subcommand="$1"
155 | shift
156 | case "$subcommand" in
157 | autosign)
158 | find_ca_root || exit $?
159 | autosign "$@"
160 | exit $?
161 | ;;
162 | trust)
163 | find_ca_root || exit $?
164 | trust "$@"
165 | exit $?
166 | ;;
167 | revoke)
168 | find_ca_root || exit $?
169 | revoke "$@"
170 | exit $?
171 | ;;
172 | -\?|-h|--help|help|"")
173 | >&2 show_usage "$@"
174 | exit $?
175 | ;;
176 | *)
177 | echo "Unknown command: $subcommand"
178 | echo ""
179 | >&2 show_usage
180 | exit 2
181 | ;;
182 | esac
183 | }
184 |
185 | main "$@"
186 |
--------------------------------------------------------------------------------
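A hedged end-to-end sketch of the flow ssh-acme implements, assuming ssh-ca is already set up on the CA host and that alice.pub and acme-host are placeholders:

    # on the CA host: trust a client key for automatic signing
    ssh-acme trust alice.pub

    # on the client: the forced command ignores the requested ':' and prints a fresh certificate
    ssh acme-host : > ~/.ssh/id_rsa-cert.pub
    ssh-keygen -L -f ~/.ssh/id_rsa-cert.pub         # inspect principals and validity

    # on the CA host: stop issuing certificates for that key
    ssh-acme revoke alice.pub
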
/ssh/ssh-auto.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | #TODO: think about this
4 | #TODO: what type of syntax do I want? HOST -> ACME, or ACME -> HOSTS
5 | #TODO: wildcards? patterns? enterprise? multi-home?
6 | function load_config() {
7 | declare -A servers
8 | local current_host=""
9 | while IFS="" read -r line; do
10 | line="${line###}"
11 | case "$line" in
12 | "#"*) ;;
13 | "ACME "*)
14 | default_acme="${line#*ACME }"
15 | current_host=""
16 | ;;
17 | "Host "*)
18 | current_host="${line#*Host }"
19 | ;;
20 | *"ACME"*)
21 | ;;
22 | esac
23 | done
24 | }
25 |
26 | function get_acme_cert() {
27 | local acme_server="$1"
28 | local cert_file="$2"
29 | ssh "$acme_server" : > "$cert_file"
30 | # TODO: verify that we got a certificate
31 | }
32 |
33 | function is_cert_valid() {
34 | local validity_line="$(ssh-keygen -L -f "$1" | grep -e 'Valid:' | cut -d: -f2-)"
35 | if [[ "$validity_line" == *forever ]]; then
36 | return 0
37 | fi
38 | local validity_period="$(echo "$validity_line" | awk '{print $2 ">" $4}')"
39 | local from="${validity_period%>*}"
40 | local to="${validity_period#*>}"
41 | local now="$(date +%FT%T)"
42 | echo "valid from $from until $to"
43 | [[ "$from" < "$now" ]] && [[ "$now" < "$to" ]]
44 | }
45 |
46 | # function load_config() {
47 | # # TODO: either load from omnibus file (~/.ssh/configfile)
48 | # # TODO: or read from file in config dir (~/.ssh/configdir/servername)
49 | # }
50 |
51 | # is_cert_valid "$@" && echo "cert is valid" || echo "cert is not currently valid"
52 |
53 | load_config
54 | echo "${servers["$1"]:-$default_acme}"
55 |
--------------------------------------------------------------------------------
/ssh/ssh-ca.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function setup_ca() {
4 | local target="${1:-$HOME/.ssh/ca}"
5 | if [[ -e "$target" ]]; then
6 | if [[ -d "$target" && -z "$(ls -A "$target")" ]]; then
7 | # target is an empty directory
8 | :
9 | else
10 | >&2 echo "$target already exists. not setting up SSH CA"
11 | return 1
12 | fi
13 | fi
14 | mkdir -p "$target"
15 | chmod 755 "$target"
16 | mkdir -p "$target/private"
17 | chmod 700 "$target/private"
18 | mkdir -p "$target/certs"
19 | touch "$target/audit.log"
20 | chmod 644 "$target/audit.log"
21 | echo "1" > "$target/next_cert_id"
22 | chmod 644 "$target/next_cert_id"
23 | echo "1" > "$target/next_krl_id"
24 | chmod 644 "$target/next_krl_id"
25 | ssh-keygen -t rsa -b 4096 -f "$target/private/ca_key" -C "CA by $USER_EMAIL" || return $?
26 | cp "$target/private/ca_key.pub" "$target/ca.pub"
27 | touch "$target/krl.source"
28 | ssh-keygen -s "$target/private/ca_key" -z 0 -k -f "$target/krl"
29 | }
30 |
31 | function _save_key_from_stdin() {
32 | local tmpdir
33 | tmpdir="$(mktemp -d)"
34 | # shellcheck disable=SC2181
35 | if [[ $? != 0 ]]; then
36 | >&2 echo "Failed to create temporary directory for stdin input"
37 | fi
38 | cat > "$tmpdir/ssh_id.pub"
39 | echo "$tmpdir/ssh_id.pub"
40 | }
41 |
42 | function _cleanup_key_from_stdin() {
43 | local key_to_sign="$1"
44 | local cert_path="${key_to_sign/%.pub/-cert.pub}"
45 | cat "$cert_path"
46 | rm "$cert_path" "$key_to_sign"
47 | rmdir "$(dirname "$cert_path")"
48 | }
49 |
50 | function sign_key() {
51 | local cert_id
52 | cert_id="$(cat "$SSHCA_ROOT/next_cert_id")"
53 | local key_to_sign="$1"
54 | local saved_key_from_stdin=""
55 | shift
56 | if [[ -z "$key_to_sign" || "$key_to_sign" == "-" ]]; then
57 | key_to_sign="$(_save_key_from_stdin)"
58 | saved_key_from_stdin=true
59 | fi
60 | if [[ ! -f "$key_to_sign" ]]; then
61 | >&2 echo "$key_to_sign does not exist"
62 | return 1
63 | fi
64 | if [[ "$key_to_sign" != *.pub ]]; then
65 | if [[ -f "$key_to_sign.pub" ]]; then
66 | key_to_sign="$key_to_sign.pub"
67 | else
68 | >&2 echo "refusing to sign non-key $key_to_sign"
69 | return 1
70 | fi
71 | fi
72 | local key_identity
73 | key_identity="$(ssh-keygen -l -f "$key_to_sign")"
74 | # local key_comment
75 | # key_comment="$(echo "$key_identity" | cut -d' ' -f4-)"
76 | local cert_path="${key_to_sign/%.pub/-cert.pub}"
77 | local cert_name
78 | cert_name="$(basename "$cert_path")"
79 | ssh-keygen -s "$SSHCA_ROOT/private/ca_key" -I "$cert_id-$USER_EMAIL" -z "$cert_id" "$@" "$key_to_sign" || return $?
80 | echo $((cert_id + 1)) > "$SSHCA_ROOT/next_cert_id"
81 | echo "$(date -u +%FT%T%z):sign:$cert_id: $key_identity" >> "$SSHCA_ROOT/audit.log"
82 | cp "$cert_path" "$SSHCA_ROOT/certs/${cert_id}-${cert_name}"
83 | if [[ -n "$saved_key_from_stdin" ]]; then
84 | _cleanup_key_from_stdin "$key_to_sign"
85 | fi
86 | }
87 |
88 | function sign_host_keys() {
89 | # sign host keys supplied on stdin, one per line
90 | while IFS= read -r line; do
91 | key_to_sign="$(_save_key_from_stdin <<<"$line")"
92 | _sign_host_key "$key_to_sign" "$@"
93 | _cleanup_key_from_stdin "$key_to_sign"
94 | done
95 | }
96 |
97 | function sign_host_key() {
98 | local key_to_sign="$1"
99 | shift
100 | if [[ -f "$key_to_sign" ]]; then
101 | _sign_host_key "$key_to_sign" "$@"
102 | elif [[ -z "$key_to_sign" || "$key_to_sign" == "-" ]]; then
103 | key_to_sign="$(_save_key_from_stdin)"
104 | _sign_host_key "$key_to_sign" "$@"
105 | _cleanup_key_from_stdin "$key_to_sign"
106 | else
107 | # assume it's a server
108 | local server="$key_to_sign"
109 | local port="${server##*:}"
110 | if [[ "$port" == "$server" ]]; then
111 | port="22"
112 | else
113 | server="${server%:*}"
114 | fi
115 | local scan_result
116 | scan_result="$(ssh-keyscan -p "$port" "$server" 2>/dev/null | grep -v -e '^#' | head -n 1)"
117 | local scan_key="${scan_result#* }"
118 | if [[ -z "$scan_key" ]]; then
119 | >&2 echo "$key_to_sign is not a public key file, and ssh-keyscan failed"
120 | return 1
121 | fi
122 | key_to_sign="$(echo "$scan_key" | _save_key_from_stdin)"
123 | _sign_host_key "$key_to_sign" "$@"
124 | _cleanup_key_from_stdin "$key_to_sign"
125 | fi
126 | }
127 |
128 | function _sign_host_key() {
129 | local key_to_sign="$1"
130 | local cert_path="${key_to_sign/%.pub/-cert.pub}"
131 | local cert_file="${cert_path##*/}"
132 | shift
133 | if sign_key "$key_to_sign" -h "$@"; then
134 | >&2 echo "Signed key for $server in $cert_file"
135 | >&2 echo "For your sshd to use the certificate, run this on the server:"
136 | >&2 echo "printf 'HostCertificate /etc/ssh/$cert_file' | sudo tee -a /etc/ssh/sshd_config >/dev/null"
137 | else
138 | >&2 echo "failed to sign $key_type from $server"
139 | return 1
140 | fi
141 | }
142 |
143 | function revoke() {
144 | local krl_id
145 | krl_id="$(cat "$SSHCA_ROOT/next_krl_id")"
146 | # first, build the KRL actions
147 | for arg in "$@"; do
148 | if [[ -f "$arg" ]]; then
149 | cat "$arg" >> "$SSHCA_ROOT/krl.source"
150 | else
151 | echo "$arg" >> "$SSHCA_ROOT/krl.source"
152 | fi
153 | echo "$(date -u +%FT%T%z):revoke: $arg" >> "$SSHCA_ROOT/audit.log"
154 | done
155 | ssh-keygen -s "$SSHCA_ROOT/private/ca_key" -z "$krl_id" -k -u -f "$SSHCA_ROOT/krl" "$SSHCA_ROOT/krl.source" || return $?
156 | echo $((krl_id + 1)) > "$SSHCA_ROOT/next_krl_id"
157 | echo "$(date -u +%FT%T%z):revoke: updated krl to revision $krl_id" >> "$SSHCA_ROOT/audit.log"
158 | }
159 |
160 | function trust_ca() {
161 | local username="${1%@*}"
162 | if [[ "$username" == "$1" ]]; then
163 | username="$USER"
164 | fi
165 | local server="${1##*@}"
166 | local port="${server##*:}"
167 | if [[ "$port" == "$server" ]]; then
168 | port="22"
169 | else
170 | server="${server%:*}"
171 | fi
172 | shift
173 |
174 | case "$server" in
175 | localhost|--local|"")
176 | _knownhosts_ca_stanza "$@" >> "$HOME/.ssh/known_hosts"
177 | ;;
178 | *)
179 | >&2 echo "Setting CA as authorized for $username@$server:$port"
180 | # shellcheck disable=SC2029
181 | ssh -p $port "$username@$server" "tee -a \$HOME/.ssh/authorized_keys <<<\"$(_authorized_key_ca_stanza "$@")\" >/dev/null"
182 | ;;
183 | esac
184 | }
185 |
186 | function _authorized_key_ca_stanza() {
187 | local IFS=","
188 | local principals="$*"
189 | unset IFS
190 | if [[ -n "$principals" ]]; then
191 | principals=" principals=\"$principals\""
192 | fi
193 | local ca_pub
194 | ca_pub="$(cat "$SSHCA_ROOT/ca.pub")"
195 | echo "cert-authority$principals $ca_pub"
196 | }
197 |
198 | function _knownhosts_ca_stanza() {
199 | local hosts="$*"
200 | if [[ -z "$hosts" ]]; then
201 | hosts="*"
202 | fi
203 | local ca_pub
204 | ca_pub="$(cat "$SSHCA_ROOT/ca.pub")"
205 | echo "@cert-authority $hosts $ca_pub"
206 | }
207 |
208 | function find_ca_root() {
209 | if [[ -n "$SSHCA_ROOT" ]]; then
210 | return 0
211 | fi
212 |
213 | local default="$HOME/.ssh/ca"
214 | if [[ -d "$default" ]]; then
215 | export SSHCA_ROOT="$default"
216 | return 0
217 | fi
218 |
219 | >&2 echo "SSH CA not set up. Run $(basename "$0") setup"
220 | return 1
221 | }
222 |
223 | function trustconfig() {
224 | _authorized_key_ca_stanza "$1"
225 | shift
226 | _knownhosts_ca_stanza "$1"
227 | }
228 |
229 | function show_usage() {
230 | local prog
231 | prog="$(basename "$0")"
232 | cat <<-HELPMESSAGE
233 | $prog setup # perform the initial setup to start acting as a SSH CA
234 | $prog install [USER@]SERVER[:PORT] [PRINCIPALS...] # install the CA certificate on the given server (limited to certs with the given principals)
235 | $prog sign KEY [-n PRINCIPALS] [OPTIONS] # sign the given KEY
236 | $prog signhost KEY [-n PRINCIPALS] [OPTIONS] # sign the given host KEY
237 | $prog signhosts [-n PRINCIPALS] [OPTIONS] # sign host keys from STDIN (one per line)
238 | $prog revoke CERTS... # revoke a certificate
239 | $prog trustconfig [PRINCIPALS [HOSTS]] # print the config stanzas for trusting keys signed by the CA
240 | $prog implode # delete the CA permanently
241 | HELPMESSAGE
242 | if [[ "$1" == "-v" || "$1" == "--verbose" ]]; then
243 | cat <<-VERBOSEHELP
244 |
245 | SSH can act as a certificate authority, allowing shorter authorized_keys and known_hosts files to be distributed as part of VM images. $prog provides shortcuts that make operating an SSH CA much easier. First set up the CA, generate the key you'd like signed, sign it with this script, and use the resulting certificate when connecting to the server. As a convenience, you can quickly install the relevant configuration stanza on a remote server.
246 |
247 | If your signed keys are compromised, you can revoke them using the syntax defined in KEY REVOCATION LIST section of the ssh-keygen manpage.
248 | VERBOSEHELP
249 | fi
250 | }
251 |
252 | function main() {
253 | local subcommand="$1"
254 | shift
255 | case "$subcommand" in
256 | setup)
257 | setup_ca "$@"
258 | exit $?
259 | ;;
260 | install)
261 | find_ca_root || exit $?
262 | trust_ca "$@"
263 | exit $?
264 | ;;
265 | revoke)
266 | find_ca_root || exit $?
267 | revoke "$@"
268 | exit $?
269 | ;;
270 | sign)
271 | find_ca_root || exit $?
272 | sign_key "$@"
273 | exit $?
274 | ;;
275 | signhosts)
276 | find_ca_root || exit $?
277 | sign_host_keys "$@"
278 | exit $?
279 | ;;
280 | signhost)
281 | find_ca_root || exit $?
282 | sign_host_key "$@"
283 | exit $?
284 | ;;
285 | trustconfig)
286 | find_ca_root || exit $?
287 | trustconfig "$@"
288 | exit $?
289 | ;;
290 | selfdestruct|uninstall|implode)
291 | find_ca_root || exit $?
292 | read -r -p "About to delete $SSHCA_ROOT. Type yes to continue: "
293 | if [[ "$REPLY" == "yes" ]]; then
294 | local removal_command="rm"
295 | type -t srm >/dev/null 2>&1 && removal_command="srm"
296 | "$removal_command" -r "$SSHCA_ROOT"
297 | else
298 | >&2 echo "Not deleting $SSHCA_ROOT"
299 | fi
300 | exit $?
301 | ;;
302 | -\?|-h|--help|help|"")
303 | >&2 show_usage "$@"
304 | exit $?
305 | ;;
306 | *)
307 | >&2 echo "Unknown command: $subcommand"
308 | >&2 echo ""
309 | >&2 show_usage
310 | exit 2
311 | ;;
312 | esac
313 | }
314 |
315 | if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
316 | main "$@"
317 | fi
318 |
--------------------------------------------------------------------------------
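A hedged worked example of the ssh-ca subcommands above (principals, hostnames, and file names are placeholders; the installer links this script into ~/bin as ssh-ca):

    ssh-ca setup                                    # creates ~/.ssh/ca: CA keypair, audit log, serial counters, KRL
    ssh-ca sign ~/.ssh/id_ed25519.pub -n alice      # extra arguments are passed through to ssh-keygen -s
    ssh-ca signhost git.example.com:2222            # ssh-keyscan the host, then sign its key
    ssh-ca install deploy@app1.example.com alice    # append a cert-authority stanza to the remote authorized_keys
    ssh-ca trustconfig alice '*.example.com'        # print authorized_keys and known_hosts stanzas for manual use
    ssh-ca revoke ~/.ssh/ca/certs/1-id_ed25519-cert.pub
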
/ssh/ssh-manager.bash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | function init_manager() {
4 | # setup a basic framework in the current directory
5 | mkdir -p authorized_keys
6 | cat <<-AUTHORIZED_KEYS >>authorized_keys/base
7 | # This file follows the standard sshd(8) authorized_keys format
8 | AUTHORIZED_KEYS
9 | if [[ -f "$HOME/.ssh/ca/ca.pub" ]]; then
10 | { echo -n "cert-authority "; cat "$HOME/.ssh/ca/ca.pub"; } >> authorized_keys/base
11 | elif [[ -f "$HOME/.ssh/id_rsa.pub" ]]; then
12 | cat < "$HOME/.ssh/id_rsa.pub" >> authorized_keys/base
13 | fi
14 |
15 | mkdir -p config
16 | cat <<-SSHCONFIG >>config/base
17 | # This file follows the standard ssh_config(5) config format
18 |
19 | IdentityFile ~/.ssh/id_rsa
20 | IdentitiesOnly yes
21 | ServerAliveInterval 30
22 | UseRoaming no
23 | # Disable client side roaming, as per http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-0777 and http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-0778
24 | SSHCONFIG
25 | }
26 |
27 | function enumerate_servers() {
28 | find "$PWD"/"$1" -name '*@*' -exec basename {} \;
29 | }
30 |
31 | function distribute() {
32 | local action="$1"
33 | shift
34 |
35 | case "$1" in
36 | -s|--seq|--sequential)
37 | shift
38 | local do_with="xargs -I {}"
39 | ;;
40 | *)
41 | local do_with="parallel --no-notice -j0"
42 | ;;
43 | esac
44 |
45 | if [[ $# -eq 0 ]]; then
46 | enumerate_servers "$action" | $do_with "$0" "push_$action" {}
47 | else
48 | printf '%s\n' "$@" | $do_with "$0" "push_$action" {}
49 | fi
50 | }
51 |
52 | function push_all() {
53 | # push out the managed configurations/authorized keys
54 | distribute authorized_keys "$@"
55 | distribute config "$@"
56 | }
57 |
58 | function compile_file() {
59 | local source_file="$1"
60 | [[ ! -f "$source_file" ]] && return 1
61 |
62 | local TMPFILE
63 | TMPFILE="$(mktemp)"
64 | pushd "$(dirname "$source_file")" >/dev/null 2>&1
65 | m4 <"$(basename "$source_file")" >"$TMPFILE"
66 | popd >/dev/null 2>&1
67 | echo "$TMPFILE"
68 | return 0
69 | }
70 |
71 | function push_config() {
72 | local compiled_config
73 | compiled_config="$(compile_file "$PWD/config/$1")"
74 | # shellcheck disable=SC2181
75 | [[ $? -ne 0 ]] && return $?
76 | # echo "about to push config to $1"
77 |
78 | if [[ "${1##*@}" == "localhost" && "${1%@*}" == "$USER" ]]; then
79 | cp "$compiled_config" "$HOME/.ssh/config"
80 | else
81 | scp -o ClearAllForwardings=yes "$compiled_config" "$1:.ssh/config"
82 | fi
83 | rm "$compiled_config"
84 | }
85 |
86 | function _check_authorization() {
87 | local authorized_keys_file="$1"
88 | local target="$2"
89 |
90 | local identity_file
91 | identity_file="$(ssh -G "$target" -T | grep -o -P -e '(?<=^identityfile ).*$')"
92 | # ssh will try to offer up all identity files in ~/.ssh, so check those in addition to the configured one
93 | for candidate_identity in "${identity_file}" ~/.ssh/*.pub; do
94 | if [[ "$candidate_identity" = *.pub ]]; then
95 | candidate_identity="${candidate_identity%.pub}"
96 | fi
97 | if [[ -f "$candidate_identity.pub" ]]; then
98 | # echo "checking validity of $candidate_identity.pub"
99 | local public_key
100 | public_key="$(cut -d' ' -f2 "$candidate_identity.pub")"
101 | # echo "searching for $public_key"
102 | # grep -v -P -e '^\s*#' "$authorized_keys_file" | grep -o -P -e 'ssh-\S+ \S+'
103 | grep -v -P -e '^\s*#' "$authorized_keys_file" | grep -o -P -e 'ssh-\S+ \S+' | grep -F -e "$public_key" -q && return 0
104 | fi
105 | if [[ -f "$candidate_identity-cert.pub" ]]; then
106 | # due to a limitation in ssh-keygen, we can only check the fingerprint, not the full key
107 | # echo "checking validity of $candidate_identity-cert.pub"
108 | local ca_fingerprint
109 | ca_fingerprint="$(ssh-keygen -L -f "$candidate_identity-cert.pub" | grep -o -e 'Signing CA: .*$' | cut -d' ' -f4-)"
110 | # echo "searching for $ca_fingerprint"
111 | # grep -v -P -e '^\s*#' "$authorized_keys_file" | grep -o -P -e 'ssh-\S+ \S+' | xargs -I % bash -c 'ssh-keygen -l -f <(echo "%")'
112 | grep -v -P -e '^\s*#' "$authorized_keys_file" | grep -o -P -e 'ssh-\S+ \S+' | xargs -I % bash -c 'ssh-keygen -l -f <(echo "%")' 2>/dev/null | grep -F -e "$ca_fingerprint" -q && return 0
113 | fi
114 | done
115 |
116 | return 1
117 | }
118 |
119 | function push_authorized_keys() {
120 | local compiled_authorized_keys
121 | compiled_authorized_keys="$(compile_file "$PWD/authorized_keys/$1")"
122 | # shellcheck disable=SC2181
123 | [[ $? -ne 0 ]] && return $?
124 | # echo "about to push authorized_keys to $1"
125 |
126 | if [[ "${1##*@}" == "localhost" && "${1%@*}" == "$USER" ]]; then
127 | cp "$compiled_authorized_keys" "$HOME/.ssh/authorized_keys"
128 | else
129 | # VALIDATION: ensure we don't lose access
130 | if ! _check_authorization "$compiled_authorized_keys" "$1"; then
131 | echo "WARNING! Refusing to push authorized_keys that would not allow future access to $1"
132 | return 2
133 | fi
134 |
135 | expected_size="$(stat -c %s "$compiled_authorized_keys")"
136 | # shellcheck disable=SC2002
137 | cat "$compiled_authorized_keys" | ssh -o ClearAllForwardings=yes "$1" bash -c 'cat > ~/.ssh/temp_authorized_keys && [[ -f .ssh/temp_authorized_keys ]] && (( $(stat -c %s ~/.ssh/temp_authorized_keys) == '"$expected_size"' )) && chmod 0600 ~/.ssh/temp_authorized_keys && mv ~/.ssh/temp_authorized_keys ~/.ssh/authorized_keys'
138 | fi
139 | rm "$compiled_authorized_keys"
140 | }
141 |
142 | function show_usage() {
143 | local prog
144 | prog="$(basename "$0")"
145 | cat <<-HELPMESSAGE
146 | $prog init # setup a managed set of servers
147 | $prog push [-s] [SERVER, ...] # push the configuration and authorized keys to all configured servers
148 | $prog compile_config SERVER # compile the ssh_config for the given server
149 | $prog push_config SERVER # push the ssh_config to the given server
150 | $prog compile_authorized_keys SERVER # compile the authorized_keys for the given server
151 | $prog push_authorized_keys SERVER # push the authorized_keys to the given server
152 | HELPMESSAGE
153 | if [[ "$1" == "-v" || "$1" == "--verbose" ]]; then
154 | cat <<-VERBOSEHELP
155 |
156 | SSH can be a pain to configure for multiple servers. This script makes the process of managing the authorized keys and client configuration much easier.
157 |
158 | The configuration files are placed as authorized_keys/user@host and config/user@host.
159 |
160 | Files are processed using m4, such that common sections can be shared using include(other_file) directives.
161 | VERBOSEHELP
162 | fi
163 | }
164 |
165 | function main() {
166 | local subcommand="$1"
167 | shift
168 | case "$subcommand" in
169 | setup|init)
170 | init_manager "$@"
171 | exit $?
172 | ;;
173 | compile_config)
174 | local compiled_config
175 | local success
176 | compiled_config="$(compile_file "$PWD/config/$1")"
177 | success=$?
178 | if [[ $success -ne 0 ]]; then
179 | exit $success
180 | fi
181 | cat "$compiled_config"
182 | rm "$compiled_config"
183 | exit 0
184 | ;;
185 | compile_authorized_keys)
186 | local compiled_authorized_keys
187 | local success
188 | compiled_authorized_keys="$(compile_file "$PWD/authorized_keys/$1")"
189 | success=$?
190 | if [[ $success -ne 0 ]]; then
191 | exit $success
192 | fi
193 | cat "$compiled_authorized_keys"
194 | rm "$compiled_authorized_keys"
195 | exit 0
196 | ;;
197 | push)
198 | push_all "$@"
199 | exit $?
200 | ;;
201 | push_config)
202 | push_config "$@"
203 | exit $?
204 | ;;
205 | push_authorized_keys)
206 | push_authorized_keys "$@"
207 | exit $?
208 | ;;
209 | -\?|-h|--help|help|"")
210 | show_usage "$@"
211 | exit $?
212 | ;;
213 | *)
214 | echo "Unknown command: $subcommand"
215 | echo ""
216 | show_usage
217 | exit 2
218 | ;;
219 | esac
220 | }
221 |
222 | main "$@"
223 |
--------------------------------------------------------------------------------
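A hedged sketch of the layout the help text above describes, with placeholder user and host names. The per-server files are processed with m4, so include(base) pulls in the shared stanzas written by init:

    ssh-manager init
    # config/deploy@app1.example.com:
    #     include(base)
    #     Host app1
    #         HostName app1.example.com
    #         User deploy
    # authorized_keys/deploy@app1.example.com:
    #     include(base)
    #     ssh-ed25519 AAAA...elided... deploy@app1
    ssh-manager push deploy@app1.example.com    # or plain "ssh-manager push" for every configured server
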
/templates/.bash_features:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # shellcheck disable=SC2034
3 |
4 | BASHFEATURE_DIRENV_ENABLED=false
5 |
--------------------------------------------------------------------------------
/templates/.config/git/config:
--------------------------------------------------------------------------------
1 | [user]
2 | name = Steven Karas
3 | email = steven.karas@gmail.com
4 | [alias]
5 | unstash = stash pop
6 | st = status
7 | br = branch
8 | co = checkout
9 | fix = commit --amend -C HEAD
10 | lol = log --graph --decorate --pretty=oneline --abbrev-commit
11 | lola = log --graph --decorate --pretty=oneline --abbrev-commit --all
12 | ls = ls-files
13 | act = for-each-ref --sort=-committerdate refs/heads/
14 | # show files ignored by git
15 | ignored = ls-files -o -i --exclude-standard
16 | force-pull = !git fetch && git reset --hard @{u}
17 | dangling-commits = !git fsck --dangling | awk '/dangling commit/ { print $3 }'
18 |
19 | # inspired by Human Git Aliases (http://gggritso.com/human-git-aliases)
20 | unstage = reset -q HEAD --
21 | discard = checkout --
22 |
23 | # brazenly stolen from http://blog.apiaxle.com/post/handy-git-tips-to-stop-you-getting-fired/
24 | assume = update-index --assume-unchanged
25 | unassume = update-index --no-assume-unchanged
26 | assumed = "!git ls-files -v | grep ^h | cut -c 3-"
27 | standup = !git log --branches --remotes --tags --no-merges --author=\"$(git config user.name)\" --since="$(if [[ "Sun" == "$(date +%a)" ]]; then echo "last thursday"; else echo "yesterday"; fi)" --format=%s
28 |
29 | # fixed variant of http://caiustheory.com/git-git-git-git-git/
30 | git = !f() { git "$@"; }; f
31 |
32 | # show commits made directly to this branch
33 | direct = log --first-parent --no-merges
34 | lg = log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)<%an>%Creset' --abbrev-commit
35 | [color]
36 | diff = auto
37 | status = auto
38 | branch = auto
39 | ui = auto
40 | [push]
41 | default = simple
42 | [rebase]
43 | autostash = true
44 | autosquash = true
45 | [pull]
46 | rebase = true
47 | [transfer]
48 | fsckobjects = true
49 | [rerere]
50 | enabled = true
51 | [fetch]
52 | writeCommitGraph = true
53 | [commit]
54 | gpgsign = true
55 | [tag]
56 | forceSignAnnotated = true
57 | gpgsign = true
58 |
--------------------------------------------------------------------------------
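A few hedged usage examples for the aliases above (paths are placeholders):

    git lol                         # one-line decorated graph of the current branch
    git assume config/local.yml     # stop tracking local edits in "git status"
    git assumed                     # list everything currently assumed unchanged
    git unassume config/local.yml
    git standup                     # your commits since yesterday (or last Thursday when run on Sunday)
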
/templates/.config/nano/nanorc:
--------------------------------------------------------------------------------
1 | include /usr/local/share/nano*/*.nanorc
2 | include /usr/share/nano*/*.nanorc
3 |
4 | set autoindent
5 | extendsyntax sh linter dash -n
6 | extendsyntax go formatter gofmt -w
7 | set stateflags
8 |
--------------------------------------------------------------------------------
/templates/.ssh/config:
--------------------------------------------------------------------------------
1 | IdentityFile ~/.ssh/id_rsa
2 | IdentitiesOnly yes
3 | ServerAliveInterval 30
4 | # Disable client side roaming, as per http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-0777 and http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-0778
5 | UseRoaming no
6 | # security suggestions as per https://stribika.github.io/2015/01/04/secure-secure-shell.html
7 | KexAlgorithms curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256
8 | Ciphers chacha20-poly1305@openssh.com,aes256-gcm@openssh.com,aes128-gcm@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr
9 | MACs hmac-sha2-512-etm@openssh.com,hmac-sha2-256-etm@openssh.com,hmac-ripemd160-etm@openssh.com,umac-128-etm@openssh.com,hmac-sha2-512,hmac-sha2-256,hmac-ripemd160,umac-128@openssh.com
10 |
11 | # recursively resolve hosts, for example:
12 | # `ssh jump+otheruser%%bastion+target` would tunnel from jump to bastion to target, using otheruser on bastion.
13 | #
14 | # Hosts are separated by +, a user is specified by %
15 | # The user for the last host cannot be specified with %, and must be given with @ or with ssh's -l option
16 | # This syntax can be embedded in urls, such as git or svn+ssh urls (hence the awkward separators)
17 | Host *+*
18 | ProxyCommand ssh -W $(echo %h | sed 's/^.*+//;s/^\([^:]*$\)/\1:22/') $(echo %h | sed 's/+[^+]*$//;s/\([^+%%]*\)%%\([^+]*\)$/\2 -l \1/;s/:\([^:+]*\)$/ -p \1/')
19 |
--------------------------------------------------------------------------------
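Hedged usage examples for the recursive Host *+* stanza above (hostnames are placeholders; the % separator for intermediate-hop users is described in the comment):

    ssh jump.example.com+db1.internal             # tunnel through jump to db1
    ssh deploy@jump.example.com+db1.internal      # "deploy" applies to the final hop
    git clone ssh://jump.example.com+git.internal/srv/git/project.git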