${url}"
7 | done
8 | ) | envchain aws aws s3 cp --content-type text/html - s3://sorah-pub/lock/$(hostname)
9 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "script/cdd"]
2 | path = third_party/cdd
3 | url = https://github.com/m4i/cdd.git
4 | [submodule "third_party/nlnog-ring"]
5 | path = third_party/nlnog-ring
6 | url = https://github.com/NLNOG/nlnog-ring
7 | [submodule "vim/dot.vim/vim-plug"]
8 | path = vim/dot.vim/vim-plug
9 | url = https://github.com/junegunn/vim-plug
10 |
--------------------------------------------------------------------------------
/misc/ubol.yml:
--------------------------------------------------------------------------------
1 | # custom uBOL rules
2 |
3 | ---
4 | id: 1
5 | priority: 10
6 | action:
7 | type: block
8 | condition:
9 | domainType: thirdParty
10 | requestDomains:
11 | - j.wovn.io
12 | ---
13 | id: 2
14 | priority: 10
15 | action:
16 | type: block
17 | condition:
18 | urlFilter: ||cache.img.gmo.jp/gmo/header/script.min.js
19 |
20 |
--------------------------------------------------------------------------------
/claude/ensure-newline.rb:
--------------------------------------------------------------------------------
1 | require 'json'
2 | path = JSON.parse($stdin.read).dig('tool_input', 'file_path')
3 | unless path
4 | puts "No file path provided"
5 | exit 1
6 | end
7 | buf = File.read(path)
8 | unless buf[-1] == "\n"
9 | puts "Ensuring newline: #{path.inspect}"
10 | File.open(path, 'a') do |io|
11 | io.write("\n")
12 | end
13 | end
14 |
--------------------------------------------------------------------------------
/script/cdd_title.rb:
--------------------------------------------------------------------------------
1 | # frozen_string_literal: true
2 | a = ENV["PWD"];
3 | a = a
4 | .sub(ENV["HOME"],"~")
5 | .sub(%r{^~/git/}, '@/')
6 | .sub(%r{^@/github.com/(.+?)/([^/]+)}, "\uE001\\2/")
7 | .split(/\//)
8 | a << "/" if a.empty?
9 | print (a[0] != "\uE001" && a.size > 4 ? a[0..-2].map{|x| x[0] == "\uE001" ? x : x[0] } << a[-1] : a).join("/").gsub(/\uE001/,'')
10 |
--------------------------------------------------------------------------------
/bin/sorah-nw-simple-locate:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if dig +nonssearch +noall +comments SRV _ldap._tcp.dc._msdcs.ds.nkmi.me. | grep -q NOERROR; then
4 | if dig +nonssearch +noall +comments SRV _ldap._tcp.dc._msdcs.ds.nkmi.me. | grep -iq 'recursion requested but not available'; then
5 | echo "unknown"
6 | exit 0
7 | fi
8 | echo "internal"
9 | else
10 | echo "unknown"
11 | fi
12 |
--------------------------------------------------------------------------------
/circus/nginx.ini:
--------------------------------------------------------------------------------
1 | [watcher:nginx]
2 | cmd = nginx -c /Users/sorah/git/config/nginx/local.conf
3 | copy_env = true
4 | send_hup = true
5 | stdout_stream.class = FileStream
6 | stdout_stream.filename = /Users/sorah/.circus/nginx.log
7 | stdout_stream.time_format = %Y-%m-%d %H:%M:%S
8 | stderr_stream.filename = /Users/sorah/.circus/nginx.log
9 | stderr_stream.time_format = %Y-%m-%d %H:%M:%S
10 |
11 |
--------------------------------------------------------------------------------
/misc/dot.irbrc:
--------------------------------------------------------------------------------
1 | require 'irb/completion'
2 | #require 'irbtools'
3 | begin
4 | require 'what_methods'
5 | rescue LoadError; end
6 | require 'pp'
7 |
8 | module Kernel
9 | def pp(*x)
10 | PP.pp(*x)
11 | nil
12 | end
13 |
14 | def clear
15 | system "clear"
16 | end
17 | end
18 |
19 | IRB.conf[:AUTO_INDENT]=true
20 | IRB.conf[:SAVE_HISTORY]=200
21 | IRB.conf[:PROMPT_MODE] = :SIMPLE
22 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.buster:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM debian:buster
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | ENV DEBIAN_FRONTEND=noninteractive
8 | RUN mkdir -p /build \
9 | && apt-get update \
10 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
11 | RUN mkdir -p -m700 /root/.gnupg
12 |
13 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.trusty:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM ubuntu:14.04
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | ENV DEBIAN_FRONTEND=noninteractive
8 | RUN mkdir -p /build \
9 | && apt-get update \
10 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
11 | RUN mkdir -p -m700 /root/.gnupg
12 |
13 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.stretch:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM debian:stretch
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | ENV DEBIAN_FRONTEND=noninteractive
8 | RUN mkdir -p /build \
9 | && apt-get update \
10 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
11 | RUN mkdir -p -m700 /root/.gnupg
12 |
13 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.bookworm:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM public.ecr.aws/debian/debian:bookworm
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | ENV DEBIAN_FRONTEND=noninteractive
8 | RUN mkdir -p /build \
9 | && apt-get update \
10 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
11 | RUN mkdir -p -m700 /root/.gnupg
12 |
13 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.bullseye:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM public.ecr.aws/debian/debian:bullseye
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | ENV DEBIAN_FRONTEND=noninteractive
8 | RUN mkdir -p /build \
9 | && apt-get update \
10 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
11 | RUN mkdir -p -m700 /root/.gnupg
12 |
13 |
--------------------------------------------------------------------------------
/circus/wakeup.ini:
--------------------------------------------------------------------------------
1 | [watcher:wakeuper]
2 | cmd = sorah-wakeuper
3 | copy_env = true
4 | stdout_stream.class = FileStream
5 | stdout_stream.filename = /Users/sorah/.circus/wakeup.log
6 | stdout_stream.time_format = %Y-%m-%d %H:%M:%S
7 | stdout_stream.max_bytes = 10000000
8 | stderr_stream.class = FileStream
9 | stderr_stream.filename = /Users/sorah/.circus/wakeup.err
10 | stderr_stream.time_format = %Y-%m-%d %H:%M:%S
11 | stderr_stream.max_bytes = 10000000
12 |
--------------------------------------------------------------------------------
/etc/curl-timing.txt:
--------------------------------------------------------------------------------
1 | --------------------------------------------------\n
2 | time_total: %{time_total}s\n
3 | \n
4 | time_namelookup: %{time_namelookup}s\n
5 | time_connect: %{time_connect}s\n
6 | time_appconnect: %{time_appconnect}s\n
7 | time_pretransfer: %{time_pretransfer}s\n
8 | time_redirect: %{time_redirect}s\n
9 | time_starttransfer: %{time_starttransfer}s\n
10 | --------------------------------------------------\n
11 |
--------------------------------------------------------------------------------
/bin/sorah-gyazo:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | dir="$(mktemp -d)"
5 |
6 | if [ -z "${dir}" ]; then
7 | exit 1
8 | fi
9 |
10 | png="${dir}/a.png"
11 | import "${png}"
12 |
13 | fn="$(TZ=UTC date +'%Y-%m-%d_%H-%M-%S')_$(openssl sha1 -hex -r "${png}" | cut -c 1-7).png"
14 |
15 | envchain aws aws s3 cp --quiet "${png}" "s3://sorah-pub/${fn}"
16 |
17 | url="https://img.sorah.jp/${fn}"
18 |
19 | echo "${url}"
20 | echo -n "${url}" | xsel -ib
21 |
22 | # remove the whole mktemp directory, not just the screenshot, so empty temp dirs are not leaked
23 | rm -r "${dir}"
24 |
--------------------------------------------------------------------------------
/bin/sorah-ruby-ensure-gems:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'bundler/inline'
3 | ENV['MAKEOPTS'] = '-j9'
4 | gemfile do
5 | source 'https://rubygems.org'
6 | gem 'aws-sdk-ec2'
7 | gem 'aws-sdk-s3'
8 | gem 'aws-sdk-sqs'
9 | gem 'aws-sdk-ecs'
10 | gem 'aws-sdk-ecr'
11 | gem 'aws-sdk-iam'
12 | gem 'aws-sdk-rds'
13 | gem 'aws-sdk-ecrpublic'
14 | gem 'aws-sdk-codebuild'
15 | gem 'solargraph'
16 | gem 'ruby-lsp'
17 | gem 'httpx'
18 | end
19 | puts 'done'
20 |
--------------------------------------------------------------------------------
/bin/sorah-autosshport:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | if [ -n "${SORAH_AUTOSSH_PORT}" ]; then
3 | echo "${SORAH_AUTOSSH_PORT}"
4 | exit
5 | fi
6 |
7 | default_iface="$(ip -oneline r get 8.8.8.8|grep -o 'dev .\+'|cut -d' ' -f2)"
8 | if [ -n "${default_iface}" ]; then
9 | # NOTE(review): the address-derived value was immediately overwritten by the random one below,
10 | # so it is commented out as dead code; restore it if a stable per-host offset is desired.
11 | #num="$(ip -oneline a show dev "${default_iface}"|grep -o 'inet .\+/'|cut -d' ' -f2|cut -d. -f4|cut -d/ -f1)"
12 | num="$(openssl rand -hex 20|sed -e 's/[^0-9]//g' |cut -c 1-2)"
13 | fi
14 |
15 | port=18600
16 | # default to 0 when num is unset (no default route) and force base 10:
17 | # a leading zero such as "08" would otherwise be parsed as invalid octal by let.
18 | let "port+=10#${num:-0}"
19 |
20 | echo "${port}"
21 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.xenial:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM ubuntu:16.04
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | RUN sed -i -e 's|archive\.ubuntu\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list
8 |
9 | ENV DEBIAN_FRONTEND=noninteractive
10 | RUN mkdir -p /build \
11 | && apt-get update \
12 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
13 | RUN mkdir -p -m700 /root/.gnupg
14 |
15 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.jammy:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM public.ecr.aws/ubuntu/ubuntu:22.04
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | RUN sed -i -e 's|archive\.ubuntu\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list
8 |
9 | ENV DEBIAN_FRONTEND=noninteractive
10 | RUN mkdir -p /build \
11 | && apt-get update \
12 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
13 | RUN mkdir -p -m700 /root/.gnupg
14 |
15 |
--------------------------------------------------------------------------------
/script/wakeup/Gemfile.lock:
--------------------------------------------------------------------------------
1 | GEM
2 | remote: https://rubygems.org/
3 | specs:
4 | aws-sdk (2.1.35)
5 | aws-sdk-resources (= 2.1.35)
6 | aws-sdk-core (2.1.35)
7 | jmespath (~> 1.0)
8 | aws-sdk-resources (2.1.35)
9 | aws-sdk-core (= 2.1.35)
10 | fluent-logger (0.5.0)
11 | msgpack (>= 0.4.4, < 0.6.0, != 0.5.3, != 0.5.2, != 0.5.1, != 0.5.0)
12 | jmespath (1.1.3)
13 | msgpack (0.5.12)
14 |
15 | PLATFORMS
16 | ruby
17 |
18 | DEPENDENCIES
19 | aws-sdk
20 | fluent-logger
21 |
22 | BUNDLED WITH
23 | 1.10.3
24 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.noble:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM public.ecr.aws/ubuntu/ubuntu:24.04
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | RUN sed -i -e 's|archive\.ubuntu\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list.d/ubuntu.sources
8 |
9 | ENV DEBIAN_FRONTEND=noninteractive
10 | RUN mkdir -p /build \
11 | && apt-get update \
12 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
13 | RUN mkdir -p -m700 /root/.gnupg
14 |
15 |
--------------------------------------------------------------------------------
/bin/glitch_word.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # Usage:
3 | # -- Basic
4 | # $ glitch_word.rb あいう
5 | # うううあああいい
6 | #
7 | # -- Separate argument to keep order of words
8 | # $ glitch_word.rb 壊れていません 、 大丈夫 です
9 | # いいまま壊壊壊壊壊壊んんててせせれれ、、、、、、丈丈丈大大夫夫夫すすでででででで
10 |
11 |
12 | def glitch(word, max_sequence = ENV["SEQ"] ? ENV["SEQ"].to_i : 3, max_chunks = ENV["CHUNK"] ? ENV["CHUNK"].to_i : 1)
13 | word.chars.map.with_index do |x, i|
14 | [[x] * rand(max_sequence).succ] * rand(max_chunks).succ
15 | end.shuffle.flatten.join
16 | end
17 |
18 | puts ARGV.map { |_| glitch _ }.join
19 |
--------------------------------------------------------------------------------
/linux/x/dot.xbindkeysrc:
--------------------------------------------------------------------------------
1 | "xvkbd -xsendevent -text '\[Left]'"
2 | Alt + h
3 |
4 | "xvkbd -xsendevent -text '\[Down]'"
5 | Alt + j
6 |
7 | "xvkbd -xsendevent -text '\[Up]'"
8 | Alt + k
9 |
10 | "xvkbd -xsendevent -text '\[Right]'"
11 | Alt + l
12 |
13 | "pactl set-sink-mute 0 false ; pactl set-sink-volume 0 +5%"
14 | XF86AudioRaiseVolume
15 |
16 | "pactl set-sink-mute 0 false ; pactl set-sink-volume 0 -5%"
17 | XF86AudioLowerVolume
18 |
19 | "pactl set-sink-mute 0 toggle"
20 | XF86AudioMute
21 |
22 | "pactl set-source-mute 1 toggle"
23 | XF86AudioMicMute
24 |
--------------------------------------------------------------------------------
/bin/sorah-ip2asn:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'resolv'
3 | require 'ipaddr'
4 |
5 | dns = Resolv::DNS.new
6 | ip = IPAddr.new(ARGV[0], ARGV[0].include?(':') ? Socket::AF_INET6 : Socket::AF_INET)
7 | reverse = ip.reverse.sub(/\.(?:in-addr\.arpa|ip6\.int|ip6\.arpa)\z/, '')
8 |
9 | dns.getresource("#{reverse}.origin#{ip.ipv6? ? '6' : nil}.asn.cymru.com", Resolv::DNS::Resource::IN::TXT).strings.each do |origin_as|
10 | asn = origin_as.split(' ').first
11 | ass = dns.getresource("AS#{asn}.asn.cymru.com", Resolv::DNS::Resource::IN::TXT).strings
12 | ass.each do |as|
13 | puts "#{as} / #{origin_as}"
14 | end
15 | end
16 |
--------------------------------------------------------------------------------
/circus/circus.ini:
--------------------------------------------------------------------------------
1 | [circus]
2 | check_delay = 5
3 | endpoint = tcp://127.0.0.1:3830
4 | pubsub_endpoint = tcp://127.0.0.1:3831
5 | endpoint_owner = sorah
6 | pidfile = /Users/sorah/.circus/circus.pid
7 | logoutput = /Users/sorah/.circus/circusd.log
8 | include_dir = /Users/sorah/.circus
9 |
10 | #[watcher:xxx]
11 | #cmd = bundle exec xxx
12 | #copy_env = true
13 | #stdout_stream.class = FileStream
14 | #stdout_stream.filename = /Users/sorah/.circus/xxx.log
15 | #stdout_stream.time_format = %Y-%m-%d %H:%M:%S
16 | #stdout_stream.max_bytes = 1073741824
17 | #stdout_stream.backup_count = 5
18 | #stderr_stream.class = StdoutStream
19 |
--------------------------------------------------------------------------------
/ssh/ssh_config.d/10-rubykaigi.conf:
--------------------------------------------------------------------------------
1 | # vim: ft=sshconfig
2 | ## nw.rubykaigi
3 |
4 | Host bastion.rubykaigi.net bastion-usw2.rubykaigi.net
5 | User rk
6 | Port 9922
7 |
8 | Host *.rubykaigi.net !bastion.rubykaigi.net !bastion-usw2.rubykaigi.net
9 | ProxyJump bastion.rubykaigi.net
10 | User rk
11 |
12 | Host *.venue.rubykaigi.net *.nrt.rubykaigi.net *.hnd.rubykaigi.net *.itm.rubykaigi.net *.hot.rubykaigi.net
13 | HostKeyAlgorithms +ssh-rsa
14 | KexAlgorithms +diffie-hellman-group14-sha1,diffie-hellman-group1-sha1
15 |
16 | Host br-*.rubykaigi.net er-*.rubykaigi.net cs-*.rubykaigi.net
17 | SetEnv TERM=screen
18 | #Ciphers +aes256-cbc
19 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ec2-get-console-output:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 | if [ "$#" -lt 1 ]; then
3 | echo "Usage: $0 name [region]" 1>&2
4 | # exit was missing: without it the script proceeded with empty arguments
5 | exit 1
6 | fi
7 |
8 | region="${2:-$AWS_DEFAULT_REGION}"
9 |
10 | if echo "$1" | grep -q "^i-"; then
11 | instance_id="$1"
12 | else
13 | instance_id="$(aws ec2 describe-instances --region "${region}" --filter "Name=tag:Name,Values=$1" | jq -r '.Reservations[] | .Instances[] | .InstanceId' | head -n1)"
14 | fi
15 |
16 | if ! echo "$instance_id" | grep -q "^i-"; then
17 | echo "invalid instance id or couldn't find the instance $instance_id" 1>&2
18 | exit 1
19 | fi
20 |
21 | aws ec2 get-console-output --region "${region}" --instance-id "${instance_id}" | jq -r .Output | less -R
22 |
--------------------------------------------------------------------------------
/bin/sorah-rsa-pem-to-jwk:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'base64'
3 | require 'openssl'
4 | require 'json'
5 |
6 | data = ARGF.read.yield_self{ |_| _.start_with?('-----') ? _ : _.unpack1('m*') }
7 | key = OpenSSL::PKey::RSA.new(data, '')
8 |
9 | # https://cs.github.com/kubernetes/kubernetes/blob/4dba52cdf4684ad47ee55e5bf251d742f39f11a3/pkg/serviceaccount/jwt.go#L98
10 | kid = Base64.urlsafe_encode64(OpenSSL::Digest.digest('sha256', key.to_der)).gsub(/=+$/,'')
11 |
12 | jwk = {
13 | use: "sig",
14 | kty: 'RSA',
15 | kid: kid,
16 | n: Base64.urlsafe_encode64(key.n.to_s(2)).gsub(/=+/,''),
17 | e: Base64.urlsafe_encode64(key.e.to_s(2)).gsub(/=+/,''),
18 | }.to_json
19 |
20 | puts jwk
21 |
--------------------------------------------------------------------------------
/misc/dot.gitconfig:
--------------------------------------------------------------------------------
1 | # vim: ft=gitconfig
2 | [color]
3 | ui = auto
4 | [push]
5 | default = simple
6 | [user]
7 | name = Sorah Fukumori
8 | #email = her@sorah.jp
9 | #signingkey = F4C0895C
10 | [commit]
11 | gpgsign = true
12 | [sendemail]
13 | smtpencryption = tls
14 | smtpserver = smtp.sorah.jp
15 | smtpuser = sorah@sorah.jp
16 | smtpserverport = 587
17 | [pull]
18 | ff = only
19 | [merge]
20 | conflictStyle = diff3
21 | [core]
22 | preloadindex = true
23 | editor = nvim
24 | [init]
25 | defaultBranch = main
26 | [feature]
27 | manyFiles = false
28 | [merge "dpkg-mergechangelogs"]
29 | name = debian/changelog merge driver
30 | driver = dpkg-mergechangelogs -m %O %A %B %A
31 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ecr-public-cleanup:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'aws-sdk-ecrpublic'
3 |
4 | @ecr = Aws::ECRPublic::Client.new(region: 'us-east-1')
5 |
6 | dry_run = ARGV.delete('--dry-run')
7 | repository_name = ARGV[0] or abort "usage #$0 REPOSITORY"
8 |
9 | @ecr.describe_images(repository_name:).each do |page|
10 | image_ids = page.image_details.filter_map do |i|
11 | if i.image_tags && !i.image_tags.empty?
12 | nil
13 | else
14 | p i
15 | {
16 | image_digest: i.image_digest
17 | }
18 | end
19 | end
20 |
21 | next if image_ids.empty?
22 |
23 | unless dry_run
24 | @ecr.batch_delete_image(
25 | repository_name:,
26 | image_ids:,
27 | )
28 | end
29 | end
30 |
--------------------------------------------------------------------------------
/nginx/jp.sorah.launchagent.nginx.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>Label</key>
6 | <string>jp.sorah.launchagent.nginx</string>
7 | <key>RunAtLoad</key>
8 | <true/>
9 | <key>KeepAlive</key>
10 | <true/>
11 | <key>ProgramArguments</key>
12 | <array>
13 | <string>/usr/local/opt/nginx-full/bin/nginx</string>
14 | <string>-c</string>
15 | <string>/etc/nginx.conf</string>
16 | <string>-g</string>
17 | <string>daemon off;</string>
18 | </array>
19 | <key>WorkingDirectory</key>
20 | <string>/</string>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/vim/coc-settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "languageserver": {
3 | "terraform": {
4 | "command": "terraform-ls",
5 | "args": ["serve"],
6 | "filetypes": ["terraform", "tf"]
7 | },
8 | "ruby": {
9 | "command": "ruby-lsp",
10 | "filetypes": ["ruby"]
11 | },
12 | "jsonnet": {
13 | "command": "jsonnet-language-server",
14 | "args": [],
15 | "filetypes": ["jsonnet"]
16 | }
17 | },
18 | "[typescript][typescriptreact][javascript][javascriptreact]": {
19 | "coc.preferences.formatOnSave": true
20 | },
21 | "[go]": {
22 | "coc.preferences.formatOnSave": true
23 | },
24 | "[terraform]": {
25 | "coc.preferences.formatOnSave": true
26 | },
27 | "coc.preferences.formatOnSave": false
28 | }
29 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ec2-get-password-data:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'aws-sdk-ec2'
3 | require 'openssl'
4 |
5 | abort "Usage #$0 key instance" if ARGV.size < 2
6 |
7 | ec2 = Aws::EC2::Client.new(region: ENV['AWS_REGION'] || ENV['AWS_DEFAULT_REGION'])
8 |
9 |
10 | instance_id = case ARGV[1]
11 | when /\Ai-/
12 | ARGV[1]
13 | else
14 | ec2.describe_instances(filters: [{name: 'tag:Name', values: [ARGV[1]]}]).reservations[0].instances[0].instance_id
15 | end
16 |
17 | crypt = ec2.get_password_data(instance_id: instance_id).password_data
18 | key = OpenSSL::PKey::RSA.new(File.read(File.expand_path(ARGV[0])))
19 |
20 | puts key.private_decrypt(crypt.strip.unpack('m*')[0], OpenSSL::PKey::RSA::PKCS1_PADDING)
21 |
--------------------------------------------------------------------------------
/bin/sorah-apt-keyring-pull:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # This script helps retrieving key from ubuntu keyserver and dearmor for signed-by usage.
3 |
4 | require 'open-uri'
5 | require 'fileutils'
6 | require 'open3'
7 |
8 | force = ARGV.delete('--force')
9 | abort "aborting because stdout is tty (or ignore with --force)" if !force && $stdout.tty?
10 |
11 | keyid = ARGV[0]
12 | abort "Usage: #{$0} keyid\ne.g. #{$0} 6DC3636DAE534049C8B94623A122542AB04F24E3" unless keyid
13 |
14 | url = keyid.start_with?('https://') ? keyid : "https://keyserver.ubuntu.com/pks/lookup?search=0x#{keyid}&exact=on&options=mr&op=get"
15 | armored = URI.open(url, 'r', &:read)
16 | dearmored, status = Open3.capture2("gpg", "--dearmor", stdin_data: armored)
17 | raise unless status.success?
18 |
19 | $stdout.write dearmored
20 |
--------------------------------------------------------------------------------
/circus/homeproxy.ini:
--------------------------------------------------------------------------------
1 | [watcher:homeproxy]
2 | cmd = autossh -M 10005 -N homeproxy
3 | copy_env = true
4 | stdout_stream.class = FileStream
5 | stdout_stream.filename = /Users/sorah/.circus/homeproxy.log
6 | stdout_stream.time_format = %Y-%m-%d %H:%M:%S
7 | stderr_stream.class = FileStream
8 | stderr_stream.filename = /Users/sorah/.circus/homeproxy.err
9 | stderr_stream.time_format = %Y-%m-%d %H:%M:%S
10 |
11 |
12 | [watcher:nkmiproxy]
13 | cmd = autossh -M 10006 -N nkmiproxy
14 | copy_env = true
15 | stdout_stream.class = FileStream
16 | stdout_stream.filename = /Users/sorah/.circus/nkmiproxy.log
17 | stdout_stream.time_format = %Y-%m-%d %H:%M:%S
18 | stderr_stream.class = FileStream
19 | stderr_stream.filename = /Users/sorah/.circus/nkmiproxy.err
20 | stderr_stream.time_format = %Y-%m-%d %H:%M:%S
21 |
--------------------------------------------------------------------------------
/Brewfile:
--------------------------------------------------------------------------------
1 | # vim: ft=ruby
2 | cask '1password'
3 | cask 'google-chrome@dev'
4 | cask 'firefox@developer-edition'
5 | cask 'google-drive'
6 | cask 'aquaskk'
7 | cask 'karabiner-elements'
8 | cask 'logseq'
9 | cask 'parsec'
10 | cask 'tailscale'
11 | cask 'logitech-g-hub'
12 | cask 'prolific-pl2303'
13 | cask 'linearmouse'
14 | cask 'the-unarchiver'
15 | cask 'wezterm'
16 | cask 'visual-studio-code@insiders'
17 | cask 'discord@ptb'
18 | cask 'slack'
19 | cask 'spotify'
20 | cask 'vlc'
21 | cask 'wireshark'
22 | cask 'figma'
23 | cask 'zoom'
24 |
25 | tap 'jorgelbg/tap'
26 |
27 | brew 'gnupg'
28 | brew 'gnu-sed'
29 | brew 'pinentry-touchid'
30 | brew 'pinentry-mac'
31 | brew 'rbenv'
32 | brew 'ruby-build'
33 | brew 'tmux'
34 | brew 'mtr'
35 |
36 | mas 'Dictionaries', id: 1380563956
37 | mas 'Ivory', id: 6444602274
38 |
--------------------------------------------------------------------------------
/bin/ssh-proxy-home:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | export PATH=$(dirname $0):$PATH
3 | sorah_ssh_port="$(grep '^# sorah_ssh_port: ' ~/.ssh/config|awk '{print $3}')"
4 | if [[ -z "${sorah_ssh_port}" ]]; then
5 | echo "specify '# sorah_ssh_port: ' in ~/.ssh/config" 1>&2
6 | exit 1
7 | fi
8 |
9 | nw="$(sorah-nw-simple-locate)"
10 |
11 | if [[ -n "$1" && -n "$2" ]]; then
12 | host="$1"
13 | port="$2"
14 | fi
15 |
16 | if [[ -z "${host}" ]]; then
17 | bastion=yes
18 | host=ssh.bastion.nkmi.me
19 | port=${sorah_ssh_port}
20 | fi
21 |
22 | case "${nw}" in
23 | internal|vpn)
24 | exec nc "${host}" "${port}"
25 | ;;
26 | external|unknown)
27 | if [[ "_${bastion}" = "_yes" ]]; then
28 | exec nc ssh.bastion.nkmi.me ${sorah_ssh_port}
29 | else
30 | exec ssh ssh.bastion.nkmi.me -W "${host}:${port}"
31 | fi
32 | esac
33 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.bionic:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM public.ecr.aws/ubuntu/ubuntu:18.04
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 | ENV DEBIAN_FRONTEND=noninteractive
7 |
8 | RUN sed -i -e 's|archive\.ubuntu\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list
9 |
10 | # Allow use of debhelper-compat 13 in backport
11 | # https://help.ubuntu.com/community/UbuntuBackports
12 | RUN /bin/echo -e "Package: debhelper libdebhelper-perl dh-autoreconf dwz\nPin: release a=bionic-backports\nPin-Priority: 500\n" | tee -a /etc/apt/preferences
13 |
14 | RUN mkdir -p /build \
15 | && apt-get update \
16 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
17 | RUN mkdir -p -m700 /root/.gnupg
18 |
19 |
--------------------------------------------------------------------------------
/debuild-docker/Dockerfile.focal:
--------------------------------------------------------------------------------
1 | # See also: https://github.com/sorah/config/blob/master/bin/sorah-debuild
2 | FROM public.ecr.aws/ubuntu/ubuntu:20.04
3 |
4 | # just to invalidate cache
5 | ARG BUILDDATE=99999999
6 |
7 | # Allow use of debhelper-compat 13 in backport
8 | # https://help.ubuntu.com/community/UbuntuBackports
9 | RUN /bin/echo -e "Package: debhelper libdebhelper-perl dh-autoreconf dwz\nPin: release a=focal-backports\nPin-Priority: 500\n" | tee -a /etc/apt/preferences
10 |
11 | RUN sed -i -e 's|archive\.ubuntu\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list
12 |
13 | ENV DEBIAN_FRONTEND=noninteractive
14 | RUN mkdir -p /build \
15 | && apt-get update \
16 | && apt-get install -y tzdata debhelper dh-make devscripts gnupg2 vim equivs
17 | RUN mkdir -p -m700 /root/.gnupg
18 |
19 |
--------------------------------------------------------------------------------
/bin/sorah-zsh-history-time-stats:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # frozen_string_literal: true
3 | require 'time'
4 |
5 | timespecstr = ARGV[0]
6 | abort "Usage: #$0 timespec-after" unless timespecstr
7 | timespec = Time.parse(timespecstr)
8 |
9 |
10 | ts = []
11 | File.open(File.expand_path('~/.zsh_history'), 'rb') do |io|
12 | io.each_line do |x|
13 | t = Time.at(x.split(':'.b)[1].to_i)
14 | next if t < timespec
15 | ts.push t
16 | end
17 | end
18 |
19 | stats = ts.group_by do |t|
20 | t.strftime("%Y-%m-%d")
21 | end.transform_values do |ts|
22 | ts.group_by(&:hour).transform_values(&:size)
23 | end
24 |
25 | stats.to_a.sort_by(&:first).each do |(day,hours)|
26 | puts "= #{day}"
27 | hours.to_a.sort_by(&:first).each do |(hour,size)|
28 | puts " #{hour.to_s.rjust(2,' ')}: #{size}"
29 | end
30 | puts
31 | end
32 |
--------------------------------------------------------------------------------
/bin/update-ruby-trunk:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 | set -x
4 |
5 | RBENV_ROOT=${RBENV_ROOT:-~/.rbenv}
6 |
7 | mkdir -p ${RBENV_ROOT}/sources/trunk
8 |
9 | if [ -d ${RBENV_ROOT}/sources/trunk/ruby ]; then
10 | cd ${RBENV_ROOT}/sources/trunk/ruby
11 | git pull --rebase
12 | else
13 | git clone https://github.com/ruby/ruby ${RBENV_ROOT}/sources/trunk/ruby
14 | cd ${RBENV_ROOT}/sources/trunk/ruby
15 | ruby tool/downloader.rb -d tool -e gnu config.guess config.sub
16 | autoconf
17 | fi
18 |
19 | mkdir -p ${RBENV_ROOT}/sources/trunk/build
20 | cd ${RBENV_ROOT}/sources/trunk/build
21 |
22 | sorah-ruby-configure ../ruby/configure --prefix=${RBENV_ROOT}/versions/trunk
23 | make -j4
24 | make install
25 |
26 | if [ ! -x ${RBENV_ROOT}/versions/trunk/bin/bundle ]; then
27 | ${RBENV_ROOT}/versions/trunk/bin/gem install bundler
28 | fi
29 |
--------------------------------------------------------------------------------
/bin/np_itunes_mac.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env osascript
2 | # UTF-8 だニャン
3 | # Returns the current playing song in iTunes for OSX
4 |
5 | tell application "System Events"
6 | set process_list to (name of every process)
7 | end tell
8 |
9 | if process_list contains "iTunes" then
10 | tell application "iTunes"
11 | if player state is playing then
12 | set track_name to name of current track
13 | #set artist_name to artist of current track
14 | set trim_length to 40
15 | #set now_playing to "♫ " & artist_name & " - " & track_name
16 | set now_playing to " ♫ " & track_name & " "
17 | if length of now_playing is less than trim_length then
18 | set now_playing_trim to now_playing
19 | else
20 | set now_playing_trim to characters 1 thru trim_length of now_playing as string
21 | end if
22 | end if
23 | end tell
24 | end if
25 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ec2-running-instances:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'optparse'
3 | require 'open-uri'
4 | require 'tempfile'
5 | require 'aws-sdk-ec2'
6 | require 'json'
7 | require 'yaml'
8 | require 'logger'
9 |
10 | Aws.config[:logger] = Logger.new($stderr)
11 |
12 | def regions
13 | @regions ||= Aws::EC2::Client.new(region: ENV['AWS_DEFAULT_REGION'] || 'ap-northeast-1').describe_regions.regions.map(&:region_name)
14 | end
15 |
16 | instances = regions.map do |region|
17 | Thread.new do
18 | ec2 = Aws::EC2::Resource.new(region: region)
19 | ec2.instances(filters: [name: 'instance-state-name', values: %w(running)]).map do |instance|
20 | name = instance.tags.find { |_| _.key == 'Name' }&.value
21 | [instance.instance_id, name, instance.instance_type, region, instance.public_dns_name].join(?\t)
22 | end
23 | end
24 | end.flat_map(&:value)
25 |
26 | puts instances
27 |
--------------------------------------------------------------------------------
/mac/dot.config/karabiner/karabiner.json:
--------------------------------------------------------------------------------
1 | {
2 | "profiles": [
3 | {
4 | "devices": [
5 | {
6 | "identifiers": { "is_keyboard": true },
7 | "simple_modifications": [
8 | {
9 | "from": { "key_code": "right_shift" },
10 | "to": [{ "key_code": "escape" }]
11 | }
12 | ]
13 | }
14 | ],
15 | "name": "Default profile",
16 | "selected": true,
17 | "simple_modifications": [
18 | {
19 | "from": { "key_code": "right_shift" },
20 | "to": [{ "key_code": "escape" }]
21 | }
22 | ],
23 | "virtual_hid_keyboard": { "keyboard_type_v2": "ansi" }
24 | }
25 | ]
26 | }
--------------------------------------------------------------------------------
/bin/sorah-ruby-configure:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -x
3 |
4 | export optflags="-O0"
5 | export debugflags="-ggdb3"
6 | export CXXFLAGS="${optflags} ${debugflags}"
7 | export CFLAGS="${optflags} ${debugflags}"
8 |
9 | conf=$1
10 | shift
11 |
12 | if [ "$(uname)" = "Darwin" ]; then
13 | $conf --enable-shared \
14 | --with-gcc=clang CXX=clang++ \
15 | --with-arch=x86_64 \
16 | --with-out-ext=tk,tk/* \
17 | --with-valgrind \
18 | --with-readline-dir=$(brew --prefix readline) \
19 | --with-openssl-dir=$(brew --prefix openssl) \
20 | "$@" \
21 | optflags="${optflags}" \
22 | debugflags="${debugflags}"
23 | else
24 | $conf --enable-shared \
25 | --with-out-ext=tk,tk/* \
26 | --with-valgrind \
27 | --enable-yjit=stats \
28 | "$@" \
29 | optflags="${optflags}" \
30 | debugflags="${debugflags}"
31 | fi
32 |
--------------------------------------------------------------------------------
/bin/train.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # coding: utf-8
3 | require 'open-uri'
4 |
5 | CACHE = "/tmp/train.rb.cache"
6 |
7 | if File.exist?(CACHE) && (Time.now - File::Stat.new(CACHE).mtime) <= 300
8 | print File.read(CACHE)
9 | exit
10 | end
11 |
12 |
13 | train_info = open('http://traininfo.jreast.co.jp/train_info/kanto.aspx','r',&:read)
14 | unless ''.respond_to?(:encode)
15 | require 'kconv'
16 | train_info = train_info.toutf8
17 | end
18 |
19 | #m = train_info.scan(/宇都宮線|湘南新宿ライン|高崎線|埼京線|山手線/).flatten.uniq
20 | m = train_info.scan(%r{| (?:)?(?:)?(.+?)(?:)?(?:)? | (?:)?(.+?)}).uniq.map{|x| x.join(":").gsub(/ /,'') }
21 | result = m.empty? ? "" : " #{m.join(", ")} "
22 | open(CACHE, 'w'){|io| io.print result }
23 | print result
24 |
--------------------------------------------------------------------------------
/bin/bf01d_status.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'open-uri'
3 | require 'nokogiri'
4 |
5 | if ARGV.empty?
6 | router, pass = File.read(File.expand_path("~/.bf01d")).split(/\r?\n/).map(&:chomp)
7 | else
8 | router, pass = ARGV[0..1]
9 | end
10 | url = "http://#{router}:8888/status"
11 |
12 | begin
13 | xml_ = open(url, http_basic_authentication: ['root', pass], &:read)
14 | rescue Exception; exit 1; end
15 | xml = Nokogiri::XML.parse(xml_)
16 |
17 |
18 | battery = xml.at('pwrStatus power battery').inner_text.to_i
19 |
20 | route = xml.at('pwrStatus routeInterface').inner_text.to_i
21 | routex = xml.at("pwrStatus interface[id='#{route}']")
22 |
23 | print "BF-01D #{battery}% - "
24 | case route
25 | when 0
26 | puts "Wi-Fi: #{routex.at('ssid').inner_text} (#{routex.at('rssi').inner_text})"
27 | when 1
28 | puts "#{routex.at('lte').inner_text == 'true' ? "LTE" : "3G"} (#{routex.at('rssi').inner_text})"
29 | when 2
30 | puts "Ethernet"
31 | else
32 | puts "Offline"
33 | end
34 |
--------------------------------------------------------------------------------
/ssh/ssh_config.d/10-kmc.conf:
--------------------------------------------------------------------------------
1 | # vim: ft=sshconfig
2 | ## kmc
3 |
4 | Host kmc.gr.jp
5 | User sorah
6 | IdentityFile ~/.ssh/id_ecdsa
7 | IdentityFile ~/.ssh/id_rsa
8 | IdentitiesOnly yes
9 | ForwardAgent no
10 | ControlMaster auto
11 | ControlPath ~/.ssh/master-%r@%h:%p
12 | ControlPersist 1h
13 |
14 | Host *.bb.box2.kmc.gr.jp *.bb.kmc.gr.jp
15 | User kmc
16 | Port 9922
17 |
18 | Host *.box2.kmc.gr.jp
19 | User sorah
20 | ProxyJump kmc.gr.jp
21 |
22 | Host *.c.bb.kmc.gr.jp
23 | ProxyJump rola.srh.bb.kmc.gr.jp
24 |
25 | Host *.kmc.gr.jp
26 | User kmc
27 | IdentityFile ~/.ssh/id_ecdsa
28 | IdentityFile ~/.ssh/id_rsa
29 | IdentitiesOnly yes
30 | ForwardAgent no
31 |
32 | Host *.bb.kmc.gr.jp *.bb.box2.kmc.gr.jp
33 | Port 9922
34 | User kmc
35 |
36 | Host hime.srh.kmc.gr.jp
37 | Port 9922
38 | User kmc
39 |
40 | Host rei.srh.kmc.gr.jp
41 | Port 22
42 | HostName rei.srh.c.kmc.gr.jp
43 | User kmc
44 |
45 | Host *.box2.kmc.gr.jp
46 | ProxyJump kmc.gr.jp
47 |
48 | Host *.srh.kmc.gr.jp *.srh.c.kmc.gr.jp !hime.srh.kmc.gr.jp
49 | Port 9922
50 | User kmc
51 | ProxyJump hime.srh.kmc.gr.jp
52 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2020 Sorah Fukumori
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
22 |
--------------------------------------------------------------------------------
/bin/sorah-fig-start:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -x
3 | set -e
4 | shopt -s nullglob
5 |
6 | mkdir -p ~/.docker-compose-transient
7 |
8 | umask 0077
9 | cd ~
10 |
11 | fig-up() {
12 | local base transient x figfile wrapper
13 | base="${x##*/}"
14 | transient=~/.docker-compose-transient/${base%.yml}.yml
15 | x=$1
16 |
17 | if [ -d $x ]; then
18 | if [ -L $x ]; then
19 | x=$(readlink $x)
20 | fi
21 | cd $x
22 |
23 | figfile="${x}/docker-compose.yml"
24 | else
25 | figfile="${x}"
26 | fi
27 |
28 | wrapper="$(grep '^# wrap: ' "${figfile}"|cut -d ' ' -f 3-)"
29 |
30 | if [ -d $x ]; then
31 | if [ -x ./pre.sh ]; then
32 | $wrapper ./pre.sh
33 | fi
34 | fi
35 |
36 | $wrapper ruby -rerb -e 'ERB.new(ARGF.read, nil, "-").run' $figfile > $transient
37 | docker-compose -f $transient -p ${base%.yml} up -d
38 |
39 | if [ -d $x ]; then
40 | if [ -x ./post.sh ]; then
41 | $wrapper ./post.sh
42 | fi
43 | fi
44 |
45 | rm $transient
46 | cd ~
47 | }
48 |
49 | for x in ${@:-$HOME/.docker-compose/*}; do
50 | if [ ! -e $x ]; then
51 | x="${HOME}/.docker-compose/${x}"
52 | fi
53 | fig-up $x
54 | done
55 |
--------------------------------------------------------------------------------
/bin/sorah-dns-sight:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'resolv'
3 | require 'ipaddr'
4 |
5 | $stdout.sync = true
6 | fqdn = ARGV[0] || 'www.facebook.com'
7 |
8 | dns = Resolv::DNS.new
9 |
10 | puts "# About `#{fqdn}`"
11 |
12 | puts
13 | puts "## DNS"
14 | puts
15 |
16 | ips = dns.getaddresses(fqdn)
17 | ip = IPAddr.new(ips.first.address.unpack('N').first, ips.first.is_a?(Resolv::IPv4) ? Socket::AF_INET : Socket::AF_INET6)
18 | puts "`#{ips.inspect}`, #{ip}"
19 |
20 | puts
21 | print "```"
22 | system "dig", "-x", ip.to_s
23 | puts "```"
24 |
25 | puts
26 | print "```"
27 | system "dig", fqdn
28 | puts "```"
29 |
30 | puts
31 | puts "## Whois (IP)"
32 | puts
33 |
34 | puts "```"
35 | system "whois", ip.to_s
36 | puts "```"
37 |
38 | puts
39 | puts "## ASN"
40 | puts
41 |
42 | reverse = ip.reverse.sub(/\.(?:in-addr\.arpa|ip6\.int|ip6\.arpa)\z/, '')
43 |
44 | dns.getresource("#{reverse}.origin#{ip.ipv6? ? '6' : nil}.asn.cymru.com", Resolv::DNS::Resource::IN::TXT).strings.each do |origin_as|
45 | asn = origin_as.split(' ').first
46 | ass = dns.getresource("AS#{asn}.asn.cymru.com", Resolv::DNS::Resource::IN::TXT).strings
47 | ass.each do |as|
48 | puts " - `#{origin_as}` `#{as}`"
49 | end
50 | end
51 |
--------------------------------------------------------------------------------
/bin/sorah-is-network-stable:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | unless RUBY_PLATFORM =~ /darwin/
4 | exit 0
5 | end
6 |
7 | stable_wifi_aps = %w(sorah soraher)
8 | if File.exist?(File.expand_path('~/.sorah_stable_aps'))
9 | stable_wifi_aps.concat File.read(File.expand_path('~/.sorah_stable_aps')).each_line.map(&:chomp)
10 | end
11 |
12 | list = `networksetup -listallhardwareports`
13 |
14 | active_iface_raw = `route get default`.match(/^\s*interface:\s*(.+)$/)[1]
15 |
16 | iface_to_name = Hash[list.scan(/Hardware Port: (.+)\nDevice: (.+)\n/).map { |(name,iface)| [iface, name] }]
17 | aliases = Hash[list.scan(/Parent Device: (.+)\nDevice (?:\(.+\))?: (.+)\n/).map { |(parent,iface)| [iface, parent] }]
18 |
19 | active_iface = aliases[active_iface_raw] || active_iface_raw
20 |
21 | if active_iface.start_with?('ppp') # VPN
22 | exit 1
23 | end
24 |
25 | wifi_ap = begin
26 | match = IO.popen(['networksetup', '-getairportnetwork', active_iface], 'r', &:read).
27 | match(/^Current Wi-Fi Network: (.+)$/)
28 | match && match[1]
29 | end
30 |
31 | if wifi_ap
32 | exit stable_wifi_aps.include?(wifi_ap) ? 0 : 1
33 | else
34 | case iface_to_name[active_iface]
35 | when /Bluetooth/i
36 | exit 1
37 | when /iPhone|iPad/i
38 | exit 1
39 | end
40 |
41 | exit 0
42 | end
43 |
--------------------------------------------------------------------------------
/bin/sorah-ecdsa-pem-to-jwk:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'base64'
3 | require 'openssl'
4 | require 'json'
5 |
6 | data = ARGF.read.yield_self{ |_| _.start_with?('-----') ? _ : _.unpack1('m*') }
7 | key = OpenSSL::PKey::EC.new(data, '')
8 | key.private_key = nil
9 |
10 | crv = case key.group.curve_name
11 | when 'prime256v1'
12 | 'P-256'
13 | when 'secp384r1'
14 | 'P-384'
15 | when 'secp521r1'
16 | 'P-521'
17 | else
18 | raise "unknown curve"
19 | end
20 |
21 | # https://github.com/unixcharles/acme-client/blob/master/lib/acme/client/jwk/ecdsa.rb
22 | hex = key.public_key.to_bn.to_s(16)
23 | data_len = hex.length - 2
24 | hex_x = hex[2, data_len / 2]
25 | hex_y = hex[2 + data_len / 2, data_len / 2]
26 |
27 | # https://cs.github.com/kubernetes/kubernetes/blob/4dba52cdf4684ad47ee55e5bf251d742f39f11a3/pkg/serviceaccount/jwt.go#L98
28 | kid = Base64.urlsafe_encode64(OpenSSL::Digest.digest('sha256', key.to_der)).gsub(/=+$/,'')
29 |
30 | jwk = {
31 | use: "sig",
32 | kty: 'EC',
33 | crv: crv,
34 | kid: kid,
35 | x: Base64.urlsafe_encode64(OpenSSL::BN.new([hex_x].pack('H*'), 2).to_s(2)).gsub(/\n|=/, ''),
36 | y: Base64.urlsafe_encode64(OpenSSL::BN.new([hex_y].pack('H*'), 2).to_s(2)).gsub(/\n|=/, ''),
37 | }.to_json
38 |
39 | puts jwk
40 |
--------------------------------------------------------------------------------
/vim/dot.vim/spell/en.utf-8.add:
--------------------------------------------------------------------------------
1 | Sora
2 | Harakami
3 | vimrcbox
4 | vimrc
5 | plugin
6 | RcbVimrc
7 | RcbGVimrc
8 | gvimrc
9 | IMproved
10 | Termtter
11 | TermtterKaigi
12 | termtter
13 | github
14 | jugyo
15 | gmail
16 | sora134
17 | committers
18 | lang
19 | org
20 | SEGV
21 | committer
22 | CyanogenMod
23 | GDD
24 | HTC
25 | geekhouse
26 | higashinihonbashi
27 | MacBook
28 | Tinatra
29 | oauth
30 | OAuth
31 | Woooooo
32 | JRuby's
33 | JRuby
34 | ruby1
35 | KCODE
36 | equal
37 | App
38 | jN
39 | j2
40 | https
41 | sorah
42 | dev
43 | minitest
44 | twrm
45 | p180
46 | OSX
47 | mspec
48 | RSpec
49 | Fukumori
50 | Shota
51 | tubusu
52 | cosmio
53 | sorah
54 | Bio
55 | RubyConf
56 | Rubyist
57 | TestCase
58 | ParallelizeTestAll
59 | blog
60 | Bormann
61 | Kosaki
62 | enterable
63 | redmine
64 | nagaokaut
65 | cgi
66 | rb
67 | nahi
68 | Naruse
69 | s
70 | るりま
71 | __From__
72 | ruby
73 | nph
74 | Nakada
75 | Nobuyoshi
76 | tenderlove
77 | finalizers
78 | Matz
79 | offline
80 | including
81 | EC2
82 | hostname
83 | cookpad
84 | jp
85 | app
86 | LOC
87 | cutty
88 | Hubot
89 | io
90 | www
91 | apps
92 | mamiya
93 | symlinks
94 | microservices
95 | middlewares
96 | tarballs
97 | etcd
98 | GitHub
99 | KMC
100 | javascript
101 | nginx
102 |
--------------------------------------------------------------------------------
/linux/x/dot.xinitrc:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export LANG=en_US.UTF-8
4 |
5 | export GTK_IM_MODULE=ibus
6 | export XMODIFIERS=@im=ibus
7 | export QT_IM_MODULE=ibus
8 | ibus-daemon -drx
9 |
10 | [[ -f ~/.Xresources ]] && xrdb -merge ~/.Xresources
11 | if [[ -e ~/.Xmodmap ]]; then
12 | xmodmap ~/.Xmodmap
13 | xkbset m
14 | fi
15 | if [[ -e ~/.fehbg ]]; then
16 | sh ~/.fehbg &
17 | fi
18 |
19 | dbus-update-activation-environment --systemd DISPLAY
20 |
21 | if [[ -e /usr/bin/keychain ]]; then
22 | eval `keychain --eval --agents ssh --systemd`
23 | export SSH_AUTH_SOCK
24 | fi
25 | if [[ -e /usr/bin/gnome-keyring-daemon ]]; then
26 | eval $(/usr/bin/gnome-keyring-daemon --start --components=secrets)
27 | fi
28 |
29 | xbindkeys
30 |
31 | xset s 120 120
32 |
33 | for dev in "TPPS/2 IBM TrackPoint" "pointer:Logitech USB Receiver"; do
34 | xinput set-prop "${dev}" "Evdev Wheel Emulation" 1
35 | xinput set-prop "${dev}" "Evdev Wheel Emulation Button" 2
36 | xinput set-prop "${dev}" "Evdev Wheel Emulation Timeout" 200
37 | xinput set-prop "${dev}" "Evdev Wheel Emulation Axes" 6 7 4 5
38 | xinput set-prop "${dev}" "Device Accel Constant Deceleration" 0.95
39 | done
40 |
41 | systemctl --user import-environment PATH
42 | systemctl --user import-environment SSH_AUTH_SOCK
43 |
44 | /usr/bin/start-pulseaudio-x11
45 |
46 | exec i3
47 |
--------------------------------------------------------------------------------
/setup-x.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ "$1" = "" ]; then
4 | if [[ -z $arch && -e /etc/pacman.conf ]]; then
5 | arch=arch
6 | fi
7 | else
8 | arch=$1
9 | fi
10 |
11 | if [ "_$arch" = "_mac" ]; then
12 | exit 1
13 | fi
14 |
15 | if [[ "_$arch" = "_arch" ]]; then
16 | sudo pacman --needed --noconfirm -Syu \
17 | xorg-server \
18 | xorg-xinput xorg-xrandr xorg-xmodmap xorg-xfontsel xorg-xev \
19 | xorg-xinit \
20 | picom \
21 | i3-wm i3status dunst dmenu \
22 | xss-lock i3lock \
23 | xbindkeys xclip xkeycaps \
24 | adobe-source-han-sans-jp-fonts adobe-source-code-pro-fonts ttf-anonymous-pro ttf-dejavu ttf-droid ttf-inconsolata otf-ipafont ttf-opensans noto-fonts noto-fonts-cjk noto-fonts-extra noto-fonts-emoji ttf-ubuntu-font-family \
25 | wezterm \
26 | alsa-utils pulseaudio pulseaudio-alsa \
27 | gnome-keyring seahorse libsecret \
28 | keychain \
29 | firefox-developer-edition
30 | #ibus ibus-skk skk-jisyo \
31 | #feh \
32 | #mpv vlc \
33 | #remmina \
34 | fi
35 |
36 | ln -sfv $HOME/git/config/linux/x/dot.xbindkeysrc ~/.xbindkeysrc
37 | ln -sfv $HOME/git/config/linux/x/dot.xinitrc ~/.xinitrc
38 | ln -sfv $HOME/git/config/linux/x/dot.Xmodmap ~/.Xmodmap
39 | ln -sfv $HOME/git/config/linux/x/dot.Xresources ~/.Xresources
40 | ln -sfv $HOME/git/config/linux/x/i3 ~/.i3
41 |
--------------------------------------------------------------------------------
/zsh/mac.zshrc_global_env:
--------------------------------------------------------------------------------
1 | # vim: ft=zsh
2 | if [[ -n "${TMUX}" ]]; then
3 | goprefix="${SORAH_GOPREFIX}"
4 | fi
5 | if [[ -z "${goprefix}" ]]; then
6 | goprefix="$(brew --prefix go)"
7 | if [[ -n "${TMUX}" ]]; then tmux setenv SORAH_GOPREFIX "${goprefix}"; fi
8 | fi
9 |
10 | [ -n "$goprefix" ] && export PATH=${goprefix}/libexec/bin:$PATH
11 |
12 | if [[ ! -e /tmp/$USER.ssh-keychain-loaded ]]; then
13 | ssh-add --apple-load-keychain
14 | touch /tmp/$USER.ssh-keychain-loaded
15 | fi
16 |
17 | # homebrew
18 |
19 | sorah_brew_prefix="$(brew --prefix)"
20 | if [ "_$sorah_brew_prefix" != "_/usr/local" ]; then
21 | export CPPFLAGS="-isystem${sorah_brew_prefix}/include $CPPFLAGS"
22 | export CFLAGS="-isystem${sorah_brew_prefix}/include $CFLAGS"
23 | export LDFLAGS="-L${sorah_brew_prefix}/lib $LDFLAGS"
24 | export PKG_CONFIG_LIBDIR="${sorah_brew_prefix}/lib/pkgconfig:/usr/lib/pkgconfig"
25 | export NODE_PATH=${sorah_brew_prefix}/lib/node_modules
26 | export PATH=${sorah_brew_prefix}/share/npm/bin:$PATH
27 | fi
28 |
29 | update_terminal_cwd() {
30 | local SEARCH=' '
31 | local REPLACE='%20'
32 | local PWD_URL="file://$HOSTNAME${PWD//$SEARCH/$REPLACE}"
33 | printf '\e]7;%s\a' "$PWD_URL"
34 | }
35 |
36 | start-circusd() {
37 | circusd --daemon --pidfile ~/.circus/circusd.pid --log-output ~/.circus/circusd.log ~/.circus.ini
38 | }
39 |
40 | export CIRCUSCTL_ENDPOINT=tcp://localhost:3830
41 |
--------------------------------------------------------------------------------
/claude/ruby.md:
--------------------------------------------------------------------------------
1 | #### General Conventions
2 |
3 | - Explicit requires at top of file
4 | - Use keyword arguments for methods with multiple parameters
5 | - Prefer `attr_reader` over instance variable access
6 | - Omit hash or keyword argument value when it is identical to key; `{foo:}` instead of `{foo: foo}`
7 |
8 | #### Module and Class Structure
9 |
10 | - Especially in AWS Lambda environment, initialize with dependency injection via `environment:` parameter
11 |
12 | #### Method Definitions
13 |
14 | - Use `def self.method_name` for class methods
15 | - Short single-line methods when appropriate
16 | - Use guard clauses for early returns
17 |
18 | #### Error Handling
19 |
20 | - Rescue specific errors (e.g., `Aws::S3::Errors::NoSuchKey`)
21 | - Raise with descriptive messages
22 |
23 | #### AWS SDK Usage
24 |
25 | - Lazy initialize AWS clients as instance variables
26 | - Pass logger to AWS clients
27 | - Use symbolized keys for AWS responses
28 |
29 | #### Constants
30 |
31 | - Use SCREAMING_SNAKE_CASE for constants
32 |
33 | #### Data handling
34 |
35 | - Use `fetch` for required hash keys
36 | - Use Hash#fetch or Array#fetch when appropriate, especially when the key or index is expected to exist.
37 | - Consistent hash syntax with colons
38 | - Use Struct or Data classes when creating structs instead of raw Hashes.
39 |
40 | #### Logging
41 |
42 | - Use structured logging with JSON when appropriate
43 | - Log important operations (locks, state changes)
44 |
--------------------------------------------------------------------------------
/script/ghq-migration.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # Based on https://github.com/hsbt/scripts/blob/master/git-go.rb
3 | require 'uri'
4 | require 'fileutils'
5 |
6 | TARGET = ENV['TARGET'] || "~/git"
7 |
8 | warn = []
9 |
10 | [Dir.glob(ARGV[0] || '*/*')].flatten.each do |dir|
11 | next unless File.directory? dir
12 | next if dir.start_with? 'config'
13 | remote_url = Dir.chdir(dir) do
14 | next unless File.directory?('.git')
15 | remotes = `git remote -v`.chomp
16 | next if remotes.empty?
17 | if remotes.match(/^origin/)
18 | remotes.scan(/origin\t(.*) \(fetch\)/).first[0]
19 | else
20 | x = remotes.scan(/(?:.+?)\t(.*) \(fetch\)/).first[0]
21 | warn << "No origin for #{dir}, using #{x}"
22 | x
23 | end
24 | end
25 |
26 | uri = URI(remote_url.to_s.gsub(/^ssh:\/\//,'').gsub(/:/, '/').gsub(/\/\/\//, '://').gsub(/git@/, 'https://').gsub(/\.git/, ''))
27 | next unless uri.hostname && uri.path
28 |
29 | h = uri.hostname
30 | u, _ = *uri.path.scan(/\/?(.+)\/(\w+)\/?/).first
31 |
32 | destination = File.expand_path("#{TARGET}/#{h}/#{u}")
33 | puts "#{dir} -> #{destination}/#{File.basename(dir)}"
34 | unless ENV["NOOP"]
35 | FileUtils.mkdir_p destination
36 | FileUtils.mv dir, destination
37 | end
38 | end
39 |
40 | if File.directory?('config')
41 | destination = File.expand_path(TARGET)
42 | FileUtils.mkdir_p destination
43 | FileUtils.mv 'config', destination
44 | end
45 |
46 | $stderr.puts warn
47 |
--------------------------------------------------------------------------------
/bin/sorah-gpg-agent-status:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 |
3 | short = ARGV.include?('--short')
4 | only_warning = ARGV.include?('--warn-only')
5 |
6 | IO.popen(['gpg-connect-agent', '--no-autostart', 'getinfo version', '/bye'], 'r') do |io|
7 | out = io.read
8 | _, status = Process.waitpid2(io.pid)
9 | unless status.success?
10 | if short
11 | print '?'
12 | else
13 | $stderr.puts out
14 | end
15 | exit 1
16 | end
17 | end
18 |
19 | keys = []
20 | key = nil
21 | IO.popen(%w(gpg --list-secret-keys --with-keygrip --with-colons), 'r', &:read).each_line do |l|
22 | line = l.chomp.split(?:)
23 | case line[0]
24 | when 'sec'
25 | keys.push key if key
26 | key = {uids: [], grps: []}
27 | when 'uid'
28 | key[:uids].push line[9]
29 | when 'grp'
30 | key[:grps].push line.last
31 | end
32 | end
33 | keys.push key if key
34 |
35 | keyinfo = IO.popen(['gpg-connect-agent', '--no-autostart', "keyinfo --list", '/bye'], 'r', &:read).each_line.grep(/^S/).map { |_| _.split(' ') }.group_by {|_| _[2] }
36 |
37 | nothing_cached = true
38 | keys.each do |k|
39 | cached = k[:grps].flat_map { |_| keyinfo[_] }.select { |_| _&.fetch(6) == '1' }
40 |
41 | nothing_cached = false unless cached.empty?
42 | case
43 | when short
44 | print cached.empty? ? 'x' : 'o'
45 | when only_warning
46 | else
47 | puts "#{k[:uids].join(?,)}: #{cached.size}"
48 | end
49 | end
50 |
51 | if nothing_cached
52 | puts "gpg:! "
53 | end
54 |
55 | puts if short
56 |
--------------------------------------------------------------------------------
/bin/sorah-cargo-checksum:
--------------------------------------------------------------------------------
1 | #!/usr/bin/ruby
2 | # https://wiki.debian.org/Teams/RustPackaging/Policy
3 | # The main binary package must also ship a .cargo-checksum.json file in that directory. This file must include a key "package", whose value provides the SHA256 checksum of the corresponding upstream .crate file, as an ASCII hex lowercase string. This file must also include a key "files", with either the value {}, or a value providing checksums of the source files in the package in the format expected for a Cargo directory registry. dh-cargo expects the source package to contain this file as debian/cargo-checksum.json.
4 |
5 | require 'fileutils'
6 | require 'digest/sha2'
7 | require 'json'
8 |
9 | crate,version = ARGV[0,2]
10 | unless crate && version
11 | abort "Usage: #$0 crate_name version"
12 | end
13 |
14 | workdir = "/tmp/cargocksum-#{crate}-#{version}"
15 | FileUtils.mkdir_p File.join(workdir, "pkg")
16 | Dir.chdir workdir
17 |
18 | system "curl", "-LSsf", "-o", "./crate.tar.gz", "https://crates.io/api/v1/crates/#{crate}/#{version}/download", exception: true
19 | system "tar", "xf", "crate.tar.gz", "-C", "pkg", "--strip-components", "1", exception: true
20 |
21 | out = {
22 | files: Dir["./pkg/**/*"].map do |path|
23 | if File.file?(path)
24 | [path.sub(%r{^./pkg/}, ''), Digest::SHA2.file(path)]
25 | else
26 | nil
27 | end
28 | end.compact.sort_by(&:first).to_h,
29 | package: Digest::SHA2.file('crate.tar.gz'),
30 | }
31 |
32 | puts JSON.pretty_generate(out)
33 |
--------------------------------------------------------------------------------
/linux/x/dot.Xresources:
--------------------------------------------------------------------------------
1 | !xrdb -merge ~/.Xresource
2 | !xrdb ~/.Xresource
3 | !--- urxvt
4 | urxvt.font: xft:Source Code Pro:style=Regular:size=10,xft:Migmix 1P:size=10,xft:Noto Sans CJK JP:size=10,xft:DejaVu Sans Mono:size=10
5 |
6 | urxvt.perl-ext-common:default,clipboard,matcher,keyboard-select,font-size
7 | urxvt.keysym.C-Up: perl:font-size:increase
8 | urxvt.keysym.C-Down: perl:font-size:decrease
9 | urxvt.keysym.M-u: perl:url-select:select_next
10 | urxvt.url-launcher: /usr/bin/xdg-open
11 | urxvt.underlineURLs: True
12 | urxvt.matcher.button: 1
13 | urxvt.keysym.M-Escape:perl:keyboard-select:activate
14 | urxvt.keysym.M-c: perl:clipboard:copy
15 | urxvt.keysym.M-v: perl:clipboard:paste
16 | urxvt.keysym.M-C-v: perl:clipboard:paste_escaped
17 | urxvt.copyCommand: xsel -ib
18 | urxvt.pasteCommand: xsel -ob
19 | !URxvt.clipboard.autocopy: true
20 |
21 |
22 | urxvt.depth: 32
23 | !URxvt.transparent: true
24 | !URxvt.shading: 15
25 | urxvt.background: [85]#000000
26 | urxvt.foreground: #E4ECDC
27 |
28 | ! black
29 | *color0: #687478
30 | *color8: #A5A7A3
31 | ! red
32 | *color1: #a40000
33 | *color9: #EF2929
34 | ! green
35 | *color2: #4E9A06
36 | *color10: #8AE234
37 | ! yellow
38 | *color3: #C4A000
39 | *color11: #FCE94F
40 | ! blue
41 | *color4: #3465A4
42 | *color12: #729FCF
43 | ! purple
44 | *color5: #75507B
45 | *color13: #AD7FA8
46 | ! orange (replaces cyan)
47 | ! *color6: #ce5c00
48 | ! *color14: #fcaf3e
49 | ! white
50 | *color7: #babdb9
51 | *color15: #EEEEEC
52 |
--------------------------------------------------------------------------------
/percol/rc.py:
--------------------------------------------------------------------------------
1 | percol.view.PROMPT = ur"? %q"
2 | percol.import_keymap({
3 | "C-f" : lambda percol: percol.command.toggle_mark_and_next(),
4 | "C-r" : lambda percol: percol.command.toggle_mark_all(),
5 | "C-h" : lambda percol: percol.command.delete_backward_char(),
6 | "C-d" : lambda percol: percol.command.delete_forward_char(),
7 | "C-k" : lambda percol: percol.command.kill_end_of_line(),
8 | "C-y" : lambda percol: percol.command.yank(),
9 | "C-t" : lambda percol: percol.command.transpose_chars(),
10 | "C-a" : lambda percol: percol.command.beginning_of_line(),
11 | "C-e" : lambda percol: percol.command.end_of_line(),
12 | "C-b" : lambda percol: percol.command.backward_char(),
13 | "M-f" : lambda percol: percol.command.forward_word(),
14 | "M-b" : lambda percol: percol.command.backward_word(),
15 | "M-d" : lambda percol: percol.command.delete_forward_word(),
16 | "M-h" : lambda percol: percol.command.delete_backward_word(),
17 | "C-n" : lambda percol: percol.command.select_next(),
18 | "C-p" : lambda percol: percol.command.select_previous(),
19 | "C-v" : lambda percol: percol.command.select_next_page(),
20 | "M-v" : lambda percol: percol.command.select_previous_page(),
21 | "M-<" : lambda percol: percol.command.select_top(),
22 | "M->" : lambda percol: percol.command.select_bottom(),
23 | "C-m" : lambda percol: percol.finish(),
24 | "C-j" : lambda percol: percol.finish(),
25 | "C-g" : lambda percol: percol.cancel(),
26 | })
27 |
--------------------------------------------------------------------------------
/vim/dot.vim/snippets/_.snippets:
--------------------------------------------------------------------------------
1 | # Global snippets
2 |
3 | # (c) holds no legal value ;)
4 | snippet c)
5 | `&enc[:2] == "utf" ? "©" : "(c)"` Copyright `strftime("%Y")` ${1:`g:snips_author`}. All Rights Reserved.${2}
6 | snippet date
7 | `strftime("%Y-%m-%d")`
8 | snippet mit
9 | MIT License
10 |
11 | (c) `strftime("%Y")` Shota Fukumori (sora_h)
12 |
13 | Permission is hereby granted, free of charge, to any person obtaining a copy
14 | of this software and associated documentation files (the "Software"), to deal
15 | in the Software without restriction, including without limitation the rights
16 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
17 | copies of the Software, and to permit persons to whom the Software is
18 | furnished to do so, subject to the following conditions:
19 |
20 | The above copyright notice and this permission notice shall be included in
21 | all copies or substantial portions of the Software.
22 |
23 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
24 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
25 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
26 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
27 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
28 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
29 | THE SOFTWARE.
30 |
--------------------------------------------------------------------------------
/bin/sorah-vault-cert-issue:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'json'
3 | require 'openssl'
4 | require 'fileutils'
5 |
6 | pki,role,name,cn = ARGV[0,4]
7 | unless pki && role && name && cn
8 | abort "usage: #$0 pki role localname cn"
9 | end
10 |
11 | outdir = File.join(File.exist?('/dev/shm') ? "/dev/shm" : "/tmp", "#{ENV.fetch('USER') { Process.euid.to_s }}", 'vault-cert')
12 |
13 | FileUtils.mkdir_p outdir
14 | File.chmod 0700, outdir
15 | Dir.chdir outdir
16 |
17 | args = ["#{pki}/issue/#{role}"]
18 | args << "common_name=#{cn}"
19 | result = IO.popen(['vault', 'write', '-format=json', *args], 'r', &:read)
20 | exitstatus = $?
21 | unless exitstatus.success?
22 | raise "vault-cert #{k} failed"
23 | end
24 |
25 | FileUtils.mkdir_p "./#{name}"
26 | Dir.chdir "./#{name}"
27 |
28 | json = JSON.parse(result, symbolize_names: true)
29 | data = json.fetch(:data)
30 |
31 | key_file = "key.pem"
32 | File.open("#{key_file}.new", 'w', 0600) do |io|
33 | io.puts data.fetch(:private_key)
34 | end
35 | File.rename("#{key_file}.new", key_file)
36 |
37 | {
38 | 'ca.pem' => [data.fetch(:ca_chain, data.fetch(:issuing_ca))].flatten.join(?\n),
39 | 'cert.pem' => data.fetch(:certificate),
40 | 'fullchain.pem' => [data.fetch(:certificate), data.fetch(:ca_chain, data.fetch(:issuing_ca))].flatten.join(?\n),
41 | }.each do |path, content|
42 | File.open("#{path}.new", 'w', 0644) do |io|
43 | io.puts content
44 | end
45 | File.rename("#{path}.new", path)
46 | end
47 |
48 | puts "serial=#{data.fetch(:serial_number)}"
49 | puts Dir["#{Dir.pwd}/*"]
50 |
--------------------------------------------------------------------------------
/bin/pdsql:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'bundler/inline'
3 | require 'optparse'
4 | gemfile do
5 | source 'https://rubygems.org'
6 | gem 'aws-sdk-dsql'
7 | end
8 |
9 | host = ENV['PGHOST']
10 | role = ENV['PGUSER'] || 'admin'
11 |
12 | opt = OptionParser.new() do |o|
13 | o.on('-h', '--host HOST', 'DSQL Host') { host = _1 }
14 | o.on('-U', '--username USER', 'role') { role = _1 }
15 | end
16 |
17 | opt.parse(ARGV.dup)
18 | host_match = host.match(/\A(?<cluster>[a-z0-9-]+)\.dsql\.(?<region>[a-z0-9-]+).on\.aws\z/) or abort "Invalid DSQL host format #{host.inspect}"
19 | region = host_match[:region]
20 |
21 | @dsql = Aws::DSQL::Client.new
22 | token_generator = Aws::DSQL::AuthTokenGenerator.new(credentials: @dsql.config.credentials)
23 |
24 |
25 | password_token = {
26 | endpoint: host,
27 | region:,
28 | expires_in: 3600,
29 | }.then do |params|
30 | case role
31 | when "admin"
32 | token_generator.generate_db_connect_admin_auth_token(params)
33 | else
34 | token_generator.generate_db_connect_auth_token(params)
35 | end
36 | end
37 |
38 | pgpassfile = Tempfile.new('pgpass').tap do |f|
39 | f.write("*:*:*:#{role}:#{password_token}\n")
40 | f.flush
41 | end
42 | ObjectSpace.undefine_finalizer(pgpassfile)
43 | ENV['PGPASSFILE'] = pgpassfile.path
44 | ENV['PGPASSWORD'] = password_token
45 | ENV['PGSSLMODE'] = 'verify-full'
46 | #ENV['PGSSLCERTMODE'] = 'require'
47 | ENV['PGSSLROOTCERT'] = '/etc/ssl/certs/ca-bundle.crt'.then { File.exist?(_1) ? _1: 'system' }
48 | ENV['PGDATABASE'] = 'postgres'
49 | ENV['PGUSER'] = role
50 |
51 | if ARGV[0] == '--'
52 | ARGV.shift
53 | exec(*ARGV)
54 | else
55 | exec 'psql', *ARGV
56 | end
57 |
--------------------------------------------------------------------------------
/bin/sorah-as-set-routes:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'whois'
3 | require 'ipaddr'
4 |
5 | @whois = Whois::Server::Adapters::Standard.new(nil, nil, "whois.radb.net")
6 |
7 | def as_set_members(query)
8 | warn "as_set_members(#{query.inspect})"
9 | resp = @whois.lookup("-T as-set #{query}")
10 | resp.to_s.gsub(/\r?\n\s+/, ' ').each_line.grep(/^members:/).flat_map { |_| _.chomp.gsub(/^members:\s*/, '').split(/,\s*|\s+/) }
11 | end
12 |
13 | def as_set_asns(query, skip: [])
14 | return [] if skip.include?(query)
15 | warn "as_set_asns(#{query.inspect})"
16 | as_sets, asns = as_set_members(query).partition { |_| _.start_with?('AS-') }
17 | (asns + as_sets.map { |_| as_set_asns(_, skip: [query,*skip,*asns]) }).flatten.uniq
18 | end
19 |
20 | def as_routes(origin, family: nil)
21 | #warn "as_routes(#{origin})"
22 | resp = @whois.lookup("-i origin #{origin}")
23 | resp.parts.flat_map do |part|
24 | part.body.each_line.grep(/^route6?:\s*/)
25 | .map { |_| _.sub(/^route6?:\s*/, '')&.chomp }
26 | .map { |_| [IPAddr.new(_), _.split(?/).last.to_i] }
27 | .select { |a,l| family.nil? || a.family == family }
28 | .uniq
29 | .sort_by { |a,l| [a.to_i, l] }
30 | end.compact
31 | end
32 |
33 | family = nil
34 | family = Socket::AF_INET if ARGV.delete('-4')
35 | family = Socket::AF_INET6 if ARGV.delete('-6')
36 |
37 | asns = as_set_asns(ARGV[0])
38 |
39 | puts "define AS_SET_ROUTES_#{ARGV[0].gsub(/^AS-/,'').gsub(/-/,'_')} = ["
40 | asns.each do |asn|
41 | puts " /* #{asn} */"
42 | as_routes(asn, family: family).each do |(prefix, prefixlen)|
43 | puts " #{prefix}/#{prefixlen},"
44 | end
45 | end
46 | puts "];"
47 |
--------------------------------------------------------------------------------
/bin/sorah-ruby-build:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env zsh
2 | set -e
3 | set -x
4 |
5 | newver=$1
6 |
7 | if [[ -z "$1" ]]; then
8 | echo "Usage: $0 newver"
9 | fi
10 |
11 | newabi="$(echo -n $newver|sed -e 's/\.[0-9]*$/.0/')"
12 | headof="$(echo -n $newver|sed -e 's/\.[0-9]*$//')"
13 |
14 | eval "$(rbenv init -)"
15 |
16 | if [[ -e ~/.rbenv/plugins/ruby-build ]]; then
17 | git -C ~/.rbenv/plugins/ruby-build pull
18 | fi
19 |
20 | if [ "$(uname)" = "Darwin" ]; then
21 | export RUBY_CONFIGURE_OPTS=" \
22 | --enable-shared \
23 | --with-gcc=clang CXX=clang++ \
24 | --with-out-ext=tk,tk/* \
25 | --with-valgrind \
26 | --with-readline-dir=$(brew --prefix readline) \
27 | --with-openssl-dir=$(brew --prefix openssl) \
28 | --enable-yjit=stats"
29 |
30 | export optflags="-O0 -ggdb3"
31 | export RUBY_CFLAGS="${optflags} ${debugflags}"
32 | export CXXFLAGS="${optflags} ${debugflags}"
33 | unset CC CXX
34 | else
35 | export RUBY_CONFIGURE_OPTS=" \
36 | --enable-shared \
37 | --with-out-ext=tk,tk/* \
38 | --with-valgrind \
39 | --enable-yjit=stats"
40 |
41 | export optflags="-O0 -ggdb3"
42 | export RUBY_CFLAGS="${optflags} ${debugflags}"
43 | export CXXFLAGS="${optflags} ${debugflags}"
44 | fi
45 |
46 | rbenv install --keep $newver
47 | export RBENV_VERSION=$newver
48 |
49 | gem pristine --extensions
50 |
51 | gem i bundler --no-doc
52 | sorah-ruby-ensure-gems
53 |
54 | rbenv rehash
55 |
56 | if [[ ( ! -z $headof ) ]]; then
57 | if [ "$(uname)" = "Darwin" ]; then
58 | ln -sf $newver $RBENV_ROOT/versions/$headof.new
59 | mv $RBENV_ROOT/versions/$headof{.new,}
60 | else
61 | ln -sfT $newver $RBENV_ROOT/versions/$headof
62 | fi
63 | fi
64 |
--------------------------------------------------------------------------------
/etc/debian-packages.txt:
--------------------------------------------------------------------------------
1 | ruby2.4 --arch=amd64 --dist=dist/2.4 --master=master-2.4 trusty xenial
2 | ruby2.5 --arch=amd64 --dist=dist/2.5 --master=master-2.5 trusty xenial bionic
3 | ruby2.6 --arch=amd64 --dist=dist/2.6 --master=master-2.6 xenial bionic focal buster
4 | ruby2.7 --arch=amd64,arm64 --dist=dist/2.7 --master=master-2.7 bionic focal buster bullseye
5 | ruby3.0 --arch=amd64,arm64 --dist=dist/3.0 --master=master-3.0 bionic focal jammy buster bullseye
6 | ruby3.1 --arch=amd64,arm64 --dist=dist/3.1 --master=master-3.1 bionic focal jammy bullseye
7 | ruby3.2 --arch=amd64,arm64 --dist=dist/3.2 --master=master-3.2 focal jammy noble bullseye bookworm
8 | ruby3.3 --arch=amd64,arm64 --dist=dist/3.3 --master=master-3.3 focal jammy noble bullseye bookworm trixie
9 | ruby3.4 --arch=amd64,arm64 --dist=dist/3.4 jammy noble bookworm trixie
10 | ruby-defaults --arch=amd64,arm64 jammy noble bookworm trixie
11 | rubygems-integration --arch=amd64 jammy noble bookworm trixie
12 | envchain --arch=amd64,arm64 focal jammy noble bullseye
13 | subsystemctl --arch=amd64,arm64 focal bullseye
14 | mitamae --arch=amd64,arm64 focal jammy noble bullseye bookworm
15 | prometheus-exporter-proxy --arch=amd64,arm64 focal jammy noble
16 | cfssl --arch=amd64,arm64 jammy noble
17 | needroleshere --arch=amd64,arm64 focal jammy noble
18 | bird2 --arch=amd64,arm64 jammy
19 |
--------------------------------------------------------------------------------
/misc/nkmi-s3.cyberduckprofile:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Protocol
6 | s3
7 | Vendor
8 | nkmi-s3
9 | Description
10 | nkmi-s3
11 | Default Nickname
12 | nkmi-s3
13 | OAuth Authorization Url
14 | https://login.microsoftonline.com/ede2c435-85f7-4b12-af38-11cf031513d3/oauth2/v2.0/authorize
15 | OAuth Token Url
16 | https://login.microsoftonline.com/ede2c435-85f7-4b12-af38-11cf031513d3/oauth2/v2.0/token
17 | OAuth Client ID
18 | 001c7a02-1349-4190-977e-2273bb70075e
19 | OAuth Client Secret
20 |
21 | OAuth Redirect Url
22 | ${oauth.handler.scheme}://oauth
23 | Scopes
24 |
25 | profile
26 | email
27 | openid
28 |
29 | STS Endpoint
30 | https://sts.ap-northeast-1.amazonaws.com/
31 | Authorization
32 | AuthorizationCode
33 | Password Configurable
34 |
35 | Username Configurable
36 |
37 | Username Placeholder
38 | Username
39 | Properties
40 |
41 | s3.assumerole.rolearn
42 | arn:aws:iam::341857463381:role/Cyberduck
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/mac/dot.config/linearmouse/linearmouse.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema" : "https:\/\/schema.linearmouse.app\/0.10.0",
3 | "schemes" : [
4 | {
5 | "if" : {
6 | "device" : {
7 | "vendorID" : "0x0",
8 | "productID" : "0x0",
9 | "category" : "trackpad",
10 | "productName" : "Apple Internal Keyboard \/ Trackpad"
11 | }
12 | },
13 | "pointer" : {
14 | "speed" : 0.8133,
15 | "disableAcceleration" : false,
16 | "acceleration" : 1.6063
17 | }
18 | },
19 | {
20 | "if" : {
21 | "device" : {
22 | "serialNumber" : "DABD35E3A48A0017",
23 | "category" : "mouse",
24 | "vendorID" : "0x46d",
25 | "productName" : "Gaming Mouse G600",
26 | "productID" : "0xc24a"
27 | }
28 | },
29 | "pointer" : {
30 | "disableAcceleration" : true,
31 | "acceleration" : 0.8
32 | },
33 | "scrolling" : {
34 | "speed" : {
35 | "vertical" : 0
36 | },
37 | "acceleration" : {
38 | "vertical" : 1
39 | },
40 | "reverse" : {
41 | "vertical" : true
42 | },
43 | "distance" : {
44 | "vertical" : 7
45 | }
46 | }
47 | },
48 | {
49 | "scrolling" : {
50 | "reverse" : {
51 | "vertical" : true
52 | }
53 | },
54 | "pointer" : {
55 | "acceleration" : 1,
56 | "disableAcceleration" : true
57 | },
58 | "if" : {
59 | "device" : {
60 | "vendorID" : "0x46d",
61 | "productName" : "USB Receiver",
62 | "productID" : "0xc53f",
63 | "category" : "mouse"
64 | }
65 | }
66 | }
67 | ]
68 | }
--------------------------------------------------------------------------------
/bin/tmux-nested-cssh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'optparse'
3 | require 'shellwords'
4 |
5 | def or_die
6 | exit $?.exitstatus || 128 if $? && !$?.success?
7 | end
8 |
9 | options = {
10 | name: nil,
11 | ssh: 'ssh',
12 | ssh_options: [],
13 | command: nil,
14 | }
15 | parser = OptionParser.new do |opt|
16 | opt.on('-n NAME', '--name NAME', 'window name') do |name|
17 | options[:name] = name
18 | end
19 |
20 | opt.on('--ssh-option OPTION', '-s OPTION', 'give ssh option') do |opt|
21 | options[:ssh_options] << opt
22 | end
23 |
24 | opt.on('--ssh-options OPTIONS', '-S OPTIONS', 'give ssh options') do |opt|
25 | options[:ssh_options].concat opt.split(/\s+/)
26 | end
27 |
28 | opt.on('--command CMD', '-c CMD', 'remote command to execute') do |opt|
29 | options[:command] = opt
30 | end
31 | end
32 | hosts = parser.parse(ARGV)
33 | options[:name] ||= 'cssh'
34 |
35 | commands = hosts.map do |host|
36 | cmd = [options[:ssh], *options[:ssh_options], host]
37 | if options[:command]
38 | cmd << options[:command]
39 | end
40 | cmd.shelljoin
41 | end
42 |
43 | first_command = commands.shift
44 |
45 | new_window_cmd = [*%w(tmux new-window -d -P -n), options[:name], first_command]
46 | puts "$ #{new_window_cmd.join(' ')}"
47 | target = IO.popen(new_window_cmd, &:read).chomp
48 | or_die
49 | target.gsub!(/\..*$/,'')
50 |
51 | commands.each do |ssh_cmd|
52 | cmd = [*%w(tmux split-window -d -t), target, ssh_cmd, ';', *%w(select-layout -t), target, 'tiled']
53 | puts "$ #{cmd.join(' ')}"
54 | system *cmd
55 | or_die
56 | end
57 |
58 | sleep 2
59 |
60 | cmd = [*%w(tmux set-window-option -t), target, *%w(synchronize-panes on)]
61 | puts "$ #{cmd.join(' ')}"
62 | system *cmd
63 | or_die
64 |
65 | cmd = [*%w(tmux select-window -t), target]
66 | puts "$ #{cmd.join(' ')}"
67 | system *cmd
68 | or_die
69 |
--------------------------------------------------------------------------------
/linux/memo.txt:
--------------------------------------------------------------------------------
1 | pacman -Syu gnupg pinentry jq screen tmux zsh git strace mercurial subversion bazel adobe-source-han-sans-jp-fonts adobe-source-code-pro-fonts ttf-anonymous-pro ttf-dejavu ttf-droid ttf-inconsolata otf-ipafont ttf-opensans noto-fonts noto-fonts-cjk noto-fonts-extra noto-fonts-emoji ttf-ubuntu-font-family mtr nmap netcat tcpdump traceroute bind-tools
2 | pacman -Syu swaks autossh bridge-utils curl ipcalc iperf dstat pv smartmontools usbutils cryptsetup dosfstools lvm2 htop iotop lsof parallel
3 | pacman -Syu i3 i3lock i3status dunst dmenu compton xautolock xbindkeys xclip xkeycaps rxvt-unicode
4 | pacman -Syu xrandr xorgs-drivers xinput xfontsel xmodmap xorg-server
5 | pacman -Syu xorg-server
6 | pacman -Syu xf86-video-intel
7 | pacman -Syu xorg-xinput xorg-xrandr xorg-xmodmap xorg-xfontsel xorg-xev
8 | pacman -Syu urxvt-perls
9 | pacman -Syu urxvt-perls urxvt-font-size
10 | pacman -Syu urxvt-perls
11 | pacman -Syu xorg-startx
12 | pacman -Syu startx
13 | pacman -Syu xorg-xinit
14 | pacman -Syu feh
15 | pacman -Syu xautolock
16 | pacman -Syu alsa-utils pulseaudio pulseaudio-alsa
17 | pacman -Syu gnome-keyring
18 | pacman -Syu seashore
19 | pacman -Syu seahorse
20 | pacman -Syu imagemagick
21 | pacman -Syu gnome-keyring-query
22 | pacman -Syu secret-tool
23 | pacman -Syu libsecret
24 | pacman -Syu aws-cli
25 | pacman -Syu postgresql postgresql-libs
26 | pacman -Syu postgresql-old-upgrade
27 | pacman -Syu keychain
28 | pacman -Syu terminfo
29 | pacman -Syu docker docker-compose
30 | pacman -Syu libvirt qemu virt-manager
31 | pacman -Syu ebtables nftables
32 | pacman -Syu ibus ibus-skk
33 | pacman -Syu skk-jisyo
34 | pacman -Syu go
35 | pacman -Syu ffmpeg mpv vlc
36 | pacman -Syu remmina
37 | pacman -Syu google-chrome-dev
38 | pacman -Syu community/firefox-developer-edition
39 | pacman -Syu whois
40 | pacman -Syu python-pip
41 | pacman -Syu xkbset
42 |
--------------------------------------------------------------------------------
/nginx/local.80.conf:
--------------------------------------------------------------------------------
1 | # vim: ft=nginx
2 | user nobody;
3 | worker_processes 1;
4 |
5 | error_log /tmp/nginx.80.error.log;
6 | pid /tmp/nginx.80.pid;
7 |
8 | events {
9 | worker_connections 1024;
10 | }
11 |
12 | http {
13 | include /etc/nginx/mime.types;
14 | default_type application/octet-stream;
15 |
16 | server_names_hash_bucket_size 128;
17 |
18 | log_format ltsv "status:$status"
19 | "\ttime:$time_iso8601"
20 | "\treqtime:$request_time"
21 | "\tmethod:$request_method"
22 | "\turi:$request_uri"
23 | "\tprotocol:$server_protocol"
24 | "\tua:$http_user_agent"
25 | "\tfowardedfor:$http_x_forwarded_for"
26 | "\thost:$remote_addr"
27 | "\treferer:$http_referer"
28 | "\tserver_name:$server_name"
29 | "\tvhost:$host"
30 | "\tsize:$body_bytes_sent"
31 | "\treqsize:$request_length"
32 | "\truntime:$upstream_http_x_runtime";
33 |
34 | access_log /tmp/nginx.80.access.log ltsv;
35 |
36 | gzip on;
37 | gzip_http_version 1.0;
38 | gzip_proxied any;
39 | gzip_min_length 500;
40 | gzip_disable "MSIE [1-6]\.";
41 | gzip_disable "Mozilla/4";
42 | gzip_types text/plain text/xml text/css
43 | text/comma-separated-values
44 | text/javascript application/x-javascript application/javascript
45 | application/atom+xml
46 | application/json;
47 |
48 | server {
49 | listen [::]:80 default_server;
50 | listen 80 default_server;
51 | server_name localhost;
52 |
53 | location / {
54 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
55 | proxy_set_header Host $http_host;
56 | proxy_redirect off;
57 | proxy_pass http://127.0.0.1:8080;
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/bin/sorah-aws-assume-role:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'etc'
3 | require 'aws-sdk-s3' # Workaround of load failure
4 | require 'aws-sdk-sts'
5 |
6 | unless ARGV[0]
7 | abort "Usage #$0 [-q] ARN [cmd]"
8 | end
9 |
10 | quiet = ARGV.delete('-q')
11 | mfa = ARGV.delete('--mfa')
12 | arn = ARGV.shift
13 | user = ENV['USER'] || Etc.getpwuid(Process.uid).name
14 | session_name = user
15 |
16 | @sts = Aws::STS::Client.new(region: 'us-east-1')
17 | def identity
18 | @identity ||= @sts.get_caller_identity()
19 | end
20 |
21 |
22 | unless arn.start_with?('arn:')
23 | arn = "arn:aws:iam::#{identity.account}:role/#{arn}"
24 | end
25 |
26 | if mfa
27 | mfa_serial = "arn:aws:iam::#{identity.account}:mfa/#{identity.arn.split(?/).last}"
28 | end
29 |
30 | options = {
31 | role_arn: arn,
32 | role_session_name: session_name,
33 | }
34 | options[:duration_seconds] = ENV['EXPIRES_IN'].to_i if ENV['EXPIRES_IN']
35 |
36 | if mfa_serial
37 | options[:serial_number] = mfa_serial
38 | print '2FA Token: '
39 | options[:token_code] = $stdin.gets.chomp
40 | end
41 |
42 | puts "=> Assuming #{arn}" unless quiet
43 | credentials = begin
44 | @sts.assume_role(options).credentials
45 | end
46 |
47 | puts " * Expires at: #{credentials.expiration} (#{'%.2f' % (credentials.expiration - Time.now)}s)" unless quiet
48 | if options[:duration_seconds]
49 | puts " * (requested #{options[:duration_seconds]}s)" unless quiet
50 | end
51 |
52 | ENV['AWS_ACCESS_KEY_ID'] = credentials.access_key_id
53 | ENV['AWS_SECRET_ACCESS_KEY'] = credentials.secret_access_key
54 | ENV['AWS_SESSION_TOKEN'] = credentials.session_token
55 |
56 | ENV['AWS_DEFAULT_REGION'] ||= 'ap-northeast-1'
57 | ENV['AWS_REGION'] ||= 'ap-northeast-1'
58 |
59 | ENV['SORAH_PROMPT_HOSTNAME'] ||= arn.split(?/).last
60 | ENV.delete 'EXPIRES_IN'
61 |
62 | if ARGV.empty?
63 | exec ENV['SHELL'] || 'bash'
64 | else
65 | exec(*ARGV)
66 | end
67 |
--------------------------------------------------------------------------------
/wezterm.lua:
--------------------------------------------------------------------------------
1 | local wezterm = require 'wezterm'
2 | local config = {}
3 |
4 | local is_windows = wezterm.target_triple == "x86_64-pc-windows-msvc"
5 |
6 | if is_windows then
7 | config.default_prog = { 'pwsh' }
8 | end
9 |
10 | config.font = wezterm.font_with_fallback{
11 | {
12 | family = 'Source Code Pro',
13 | weight = 'Regular',
14 | assume_emoji_presentation = false,
15 | },
16 | 'Noto Sans CJK JP',
17 | 'Noto Sans JP',
18 | }
19 | config.cell_width = 0.8
20 | config.line_height = 1.0
21 | config.font_size = 11
22 | --config.harfbuzz_features = { 'calt=0', 'clig=0', 'liga=0' }
23 |
24 | config.color_scheme = 'Tango (terminal.sexy)'
25 | config.colors = {
26 | background = "#0c0c0c",
27 | foreground = "#d8d8d8",
28 | }
29 | config.window_background_opacity = 0.90
30 |
31 | config.window_frame = {
32 | font = wezterm.font_with_fallback{
33 | { family = 'Source Sans 3', assume_emoji_presentation = false },
34 | { family = 'Source Sans Pro', assume_emoji_presentation = false },
35 | { family = 'Noto Sans CJK JP', weight = 'Medium' },
36 | { family = 'Noto Sans JP', weight = 'Medium' },
37 | { family = 'Noto Sans', weight = 'Medium' },
38 | },
39 | font_size = 8,
40 | }
41 | config.command_palette_font_size = 10.0
42 | config.char_select_font_size = 12.0
43 | config.hide_tab_bar_if_only_one_tab = true
44 |
45 | config.window_padding = {
46 | left = '0.2cell',
47 | right = '0.2cell',
48 | top = '0cell',
49 | bottom = '0cell',
50 | }
51 |
52 | config.keys = {
53 | { mods = 'ALT', key = 'c', action = wezterm.action.CopyTo 'Clipboard' },
54 | { mods = 'ALT', key = 'v', action = wezterm.action.PasteFrom 'Clipboard' },
55 | { mods = 'ALT', key = '.', action = wezterm.action.CharSelect {
56 | }},
57 | }
58 |
59 | config.detect_password_input = true
60 |
61 | config.canonicalize_pasted_newlines = "CarriageReturn"
62 |
63 | return config
64 |
--------------------------------------------------------------------------------
/bin/256colors2.pl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/perl
2 | # Author: Todd Larason
3 | # $XFree86: xc/programs/xterm/vttests/256colors2.pl,v 1.2 2002/03/26 01:46:43 dickey Exp $
4 |
5 | # use the resources for colors 0-15 - usually more-or-less a
6 | # reproduction of the standard ANSI colors, but possibly more
7 | # pleasing shades
8 |
9 | # colors 16-231 are a 6x6x6 color cube
10 | for ($red = 0; $red < 6; $red++) {
11 | for ($green = 0; $green < 6; $green++) {
12 | for ($blue = 0; $blue < 6; $blue++) {
13 | printf("\x1b]4;%d;rgb:%2.2x/%2.2x/%2.2x\x1b\\",
14 | 16 + ($red * 36) + ($green * 6) + $blue,
15 | ($red ? ($red * 40 + 55) : 0),
16 | ($green ? ($green * 40 + 55) : 0),
17 | ($blue ? ($blue * 40 + 55) : 0));
18 | }
19 | }
20 | }
21 |
22 | # colors 232-255 are a grayscale ramp, intentionally leaving out
23 | # black and white
24 | for ($gray = 0; $gray < 24; $gray++) {
25 | $level = ($gray * 10) + 8;
26 | printf("\x1b]4;%d;rgb:%2.2x/%2.2x/%2.2x\x1b\\",
27 | 232 + $gray, $level, $level, $level);
28 | }
29 |
30 |
31 | # display the colors
32 |
33 | # first the system ones:
34 | print "System colors:\n";
35 | for ($color = 0; $color < 8; $color++) {
36 | print "\x1b[48;5;${color}m ";
37 | }
38 | print "\x1b[0m\n";
39 | for ($color = 8; $color < 16; $color++) {
40 | print "\x1b[48;5;${color}m ";
41 | }
42 | print "\x1b[0m\n\n";
43 |
44 | # now the color cube
45 | print "Color cube, 6x6x6:\n";
46 | for ($green = 0; $green < 6; $green++) {
47 | for ($red = 0; $red < 6; $red++) {
48 | for ($blue = 0; $blue < 6; $blue++) {
49 | $color = 16 + ($red * 36) + ($green * 6) + $blue;
50 | print "\x1b[48;5;${color}m ";
51 | }
52 | print "\x1b[0m ";
53 | }
54 | print "\n";
55 | }
56 |
57 |
58 | # now the grayscale ramp
59 | print "Grayscale ramp:\n";
60 | for ($color = 232; $color < 256; $color++) {
61 | print "\x1b[48;5;${color}m ";
62 | }
63 | print "\x1b[0m\n";
64 |
--------------------------------------------------------------------------------
/bin/weather.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | # coding: utf-8
3 | require 'open-uri'
4 | require 'cgi'
5 |
6 | CACHE = "/tmp/weather.rb.cache"
7 |
8 | if File.exist?(CACHE) && (Time.now - File::Stat.new(CACHE).mtime) <= 300
9 | print File.read(CACHE)
10 | exit
11 | end
12 |
13 | locations = {"Minato, Tokyo" => "港", "Utsunomiya, Tochigi" => '鬱'}
14 |
15 | def api_url(location)
16 | "http://www.google.com/ig/api?hl=en&weather=#{CGI.escape(location)}"
17 | end
18 |
19 | urls = locations.map{|l, s| [api_url(l), s] }
20 |
21 | def condition_to_symbol(condition)
22 | case condition.downcase
23 | when /sunny$/
24 | h = Time.now.hour
25 | (20 <= h || h <= 4) ? "☾" : "☀"
26 | when /(rain|drizzle|showers)$/, "flurries"
27 | "☂"
28 | when /snow( showers)?$/, "sleet", "icy"
29 | "❅"
30 | when /cloudy$/, "overcast"
31 | "☁"
32 | when /storms?$/
33 | "⚑ ☂"
34 | when 'dust', 'fog', 'smoke', 'haze', 'mist'
35 | "♨"
36 | when 'windy'
37 | "⚑"
38 | when 'clear'
39 | "✈"
40 | else
41 | "[#{condition}]"
42 | end
43 | end
44 |
45 | def weather(url)
46 | xml = open(url, 'r', &:read)
47 | current = xml.match(/<current_conditions>(.+?)<\/current_conditions>/m)[1]
48 | forecast = xml.match(/<forecast_conditions>(.+?)<\/forecast_conditions>/m)[1]
49 | current_sym = condition_to_symbol(current.match(/<condition data="([^"]+)"\/>/)[1])
50 | current_temp = current.match(/<temp_c data="([^"]+)"\/>/)[1] + "C"
51 | forecast_sym = forecast ? "#{condition_to_symbol(forecast.match(/<condition data="([^"]+)"\/>/)[1])}" : ""
52 | "#{current_sym} #{current_temp}#{forecast_sym}"
53 | rescue Exception
54 | nil
55 | end
56 |
57 | error = false
58 | result = urls.map { |url, s|
59 | w = weather(url)
60 | error = true if w.nil?
61 | "#{s}#{w || "✗>_<✗"}"
62 | }.join(" ")+" "
63 |
64 | if error
65 | File.delete CACHE
66 | else
67 | open(CACHE, 'w'){|io| io.print result }
68 | end
69 |
70 | print "✱ "+result
71 |
--------------------------------------------------------------------------------
/ssh/ssh_config.d/10-nkmi.conf:
--------------------------------------------------------------------------------
1 | # vim: ft=sshconfig
2 | ## nkmi.me
3 |
4 | Host *.c.nkmi.me *.compute.nkmi.me
5 | Port 9922
6 | ProxyCommand ~/git/config/bin/ssh-proxy-home %h %p
7 |
8 | Host *.n.nkmi.me *.nw.nkmi.me
9 | Port 22
10 | ProxyCommand ~/git/config/bin/ssh-proxy-home %h %p
11 |
12 | Host mafuyu.c.nkmi.me sumika.c.nkmi.me mafuyu.nkmi.org sumika.nkmi.org mafuyu
13 | Port 22
14 | LocalForward localhost:3000 localhost:3000
15 | LocalForward localhost:3001 localhost:3001
16 | LocalForward localhost:3002 localhost:3002
17 | LocalForward localhost:5173 localhost:5173
18 | LocalForward localhost:16252 localhost:16252
19 |
20 | Host mafuyu.nkmi.org sumika.nkmi.org i-*.nkmi.org
21 | ProxyCommand cloudflared access ssh --hostname %h
22 |
23 | Host ssh.bastion.nkmi.me
24 | HostName ssh.bastion.nkmi.me
25 | Port 9922
26 | User sorah
27 |
28 | HostKeyAlias nkmi-bastion
29 |
30 | IdentitiesOnly yes
31 | IdentityFile ~/.ssh/id_ecdsa
32 |
33 | ControlMaster auto
34 | ControlPath ~/.ssh/master-%r@%h:%p
35 | ControlPersist 1h
36 |
37 | LocalForward localhost:13389 10.3.0.4:3389
38 |
39 | Host *.compute.nkmi.me *.c.nkmi.me
40 | Port 9922
41 | ProxyCommand ~/git/config/bin/ssh-proxy-home %h %p
42 |
43 | Host *.nw.nkmi.me *.n.nkmi.me
44 | User root
45 | Port 22
46 | ProxyCommand ~/git/config/bin/ssh-proxy-home %h %p
47 | IdentitiesOnly yes
48 | IdentityFile ~/.ssh/id_ecdsa
49 | IdentityFile ~/.ssh/id_rsa
50 | PreferredAuthentications publickey,password
51 | HostKeyAlgorithms +ssh-rsa
52 | KexAlgorithms +diffie-hellman-group14-sha1,diffie-hellman-group1-sha1
53 | SetEnv TERM=screen
54 |
55 | Host *.sorah.jp sorah.jp privs.net *.privs.net *.sorah.me *.her *.nkmi.me
56 | Port 9922
57 | # AddressFamily inet
58 | User sorah
59 | IdentityFile ~/.ssh/id_ecdsa
60 | IdentityFile ~/.ssh/id_rsa
61 | IdentitiesOnly yes
62 |
63 | Host kaede.n.nkmi.me
64 | KexAlgorithms diffie-hellman-group14-sha1
65 |
66 |
67 |
--------------------------------------------------------------------------------
/linux/x/i3/status.conf:
--------------------------------------------------------------------------------
1 | # i3status configuration file.
2 | # see "man i3status" for documentation.
3 |
4 | # It is important that this file is edited as UTF-8.
5 | # The following line should contain a sharp s:
6 | # ß
7 | # If the above line is not correctly displayed, fix your editor first!
8 |
9 | general {
10 | colors = true
11 | interval = 2
12 | }
13 |
14 | order += "disk /"
15 | # order += "wireless wlp3s0"
16 | order += "cpu_temperature 0"
17 | order += "battery 0"
18 | order += "load"
19 | order += "volume master"
20 | order += "tztime pt"
21 | order += "tztime cet"
22 | order += "tztime gmt"
23 | order += "tztime utc"
24 | order += "tztime local"
25 |
26 | cpu_temperature 0 {
27 | format = "%degrees C"
28 | }
29 |
30 | wireless wlp3s0 {
31 | format_up = "W: (%quality at %essid) %ip"
32 | format_down = "W: down"
33 | }
34 |
35 | ethernet eth0 {
36 | # if you use %speed, i3status requires root privileges
37 | format_up = "E: %ip (%speed)"
38 | format_down = "E: down"
39 | }
40 |
41 | battery 0 {
42 | format = "%status %percentage %remaining"
43 | }
44 |
45 | run_watch DHCP {
46 | pidfile = "/var/run/dhclient*.pid"
47 | }
48 |
49 | run_watch VPN {
50 | pidfile = "/var/run/vpnc/pid"
51 | }
52 |
53 | tztime local {
54 | format = "%Y-%m-%d %H:%M:%S"
55 | }
56 |
57 | tztime pt {
58 | timezone = "America/Los_Angeles"
59 | format = "PT %a %H:%M"
60 | }
61 |
62 | tztime utc {
63 | timezone = "Etc/UTC"
64 | format = "UTC %a %H:%M"
65 | }
66 |
67 | tztime cet {
68 | timezone = "Europe/Madrid"
69 | format = "CET %a %H:%M"
70 | }
71 |
72 | tztime gmt {
73 | timezone = "Europe/London"
74 | format = "%Z %a %H:%M"
75 | }
76 |
77 | load {
78 | format = "%1min"
79 | }
80 |
81 | disk "/" {
82 | format = "%free"
83 | }
84 |
85 |
86 | volume master {
87 | format = "♪: %volume"
88 | format_muted = "♪:mute (%volume)"
89 | device = "pulse:0"
90 | }
91 |
--------------------------------------------------------------------------------
/nginx/local.conf:
--------------------------------------------------------------------------------
1 | # vim: ft=nginx
2 | worker_processes 1;
3 | daemon off;
4 |
5 | error_log /tmp/nginx.local.error.log;
6 | pid /tmp/nginx.local.pid;
7 |
8 | events {
9 | worker_connections 1024;
10 | }
11 |
12 | http {
13 | include /etc/nginx/mime.types;
14 | default_type application/octet-stream;
15 |
16 | server_names_hash_bucket_size 128;
17 |
18 | log_format ltsv "status:$status"
19 | "\ttime:$time_iso8601"
20 | "\treqtime:$request_time"
21 | "\tmethod:$request_method"
22 | "\turi:$request_uri"
23 | "\tprotocol:$server_protocol"
24 | "\tua:$http_user_agent"
25 | "\tfowardedfor:$http_x_forwarded_for"
26 | "\thost:$remote_addr"
27 | "\treferer:$http_referer"
28 | "\tserver_name:$server_name"
29 | "\tvhost:$host"
30 | "\tsize:$body_bytes_sent"
31 | "\treqsize:$request_length"
32 | "\tapptime:$upstream_response_time"
33 | "\truntime:$upstream_http_x_runtime";
34 |
35 | access_log /tmp/nginx.local.access.log ltsv;
36 |
37 | gzip on;
38 | gzip_http_version 1.0;
39 | gzip_proxied any;
40 | gzip_min_length 500;
41 | gzip_disable "MSIE [1-6]\.";
42 | gzip_disable "Mozilla/4";
43 | gzip_types text/plain text/xml text/css
44 | text/comma-separated-values
45 | text/javascript application/x-javascript application/javascript
46 | application/atom+xml
47 | application/json;
48 |
49 | include /Users/sorah/git/config/nginx/conf.d/*;
50 | include /Users/sorah/.nginx/*;
51 |
52 | server {
53 | listen [::1]:8080 default_server;
54 | listen 127.0.0.1:8080 default_server;
55 |
56 | server_name localhost;
57 |
58 | location / {
59 | allow 127.0.0.1;
60 | allow ::1;
61 | deny all;
62 | }
63 |
64 | location /nginx_status {
65 | allow 127.0.0.1;
66 | allow ::1;
67 | deny all;
68 | stub_status on;
69 | }
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/bin/sorah-debsign:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | if which apt-get >/dev/null 2>/dev/null; then
3 | exec debsign "$@"
4 | fi
5 | if [[ -z "$(docker image ls -q devscripts)" ]]; then
6 | docker buildx build --load -t devscripts -f ~/git/config/docker/Dockerfile.devscripts ~/git/config/docker
7 | fi
8 | #docker run --rm devscripts debsign "$@"
9 |
10 | gpg_agent_socket=$(gpgconf --list-dirs agent-socket)
11 | gpg_user="$GPGUSER"
12 | if [[ -z "$gpg_user" ]]; then
13 | gpg_user=$(gpg --list-secret-keys --with-colons|grep '^fpr:'|head -n1|cut -d: -f10)
14 | fi
15 |
16 | gpg_public_key=/tmp/debsign-$$-pub.asc
17 | gpg --export "$gpg_user" > $gpg_public_key
18 | trap "rm $gpg_public_key" EXIT
19 |
20 | export DEBSIGN_MAINT="$(git config --global --get user.email)"
21 |
22 | # Unlock GPG secret key when necessary
23 | if [[ -z $GPG_TTY ]]; then
24 | export GPG_TTY=$(tty)
25 | fi
26 | echo | gpg -u "${gpg_user}" --clearsign --output /dev/null
27 |
28 | bootstrap_cmds=$(mktemp)
29 | cat >${bootstrap_cmds} <<-EOF
30 | useradd -u $(id -u) -m buildbot
31 | chown buildbot:buildbot /home/buildbot
32 | chown buildbot:buildbot /home/buildbot/.gnupg
33 | chmod 700 /home/buildbot
34 | chmod 700 /home/buildbot/.gnupg
35 | cp /run.sh /run2.sh
36 | chown buildbot:buildbot /run2.sh
37 |
38 | exec sudo -u buildbot env DEBSIGN_KEYID='$DEBSIGN_KEYID' DEBSIGN_MAINT='$DEBSIGN_MAINT' bash -xe /run2.sh "\$@"
39 | EOF
40 |
41 | cmds=$(mktemp)
42 | cat >${cmds} <<-'EOF'
43 |
44 | gpg2 --import --no-tty --batch < /pub.asc
45 | if [ -z $DEBSIGN_KEYID ]; then
46 | export DEBSIGN_KEYID=0x$(gpg2 --with-colons --list-keys|grep '^sub:'|head -n1|cut -d: -f 5)
47 | fi
48 | exec debsign -pgpg2 -k $DEBSIGN_KEYID "$@"
49 | EOF
50 |
51 | docker run \
52 | --rm \
53 | --net=host \
54 | --tty \
55 | --interactive \
56 | --volume "${gpg_public_key}:/pub.asc" \
57 | --volume "${gpg_agent_socket}:/home/buildbot/.gnupg/S.gpg-agent" \
58 | --volume "${cmds}:/run.sh:ro" \
59 | --volume "${bootstrap_cmds}:/bootstrap.sh:ro" \
60 | --volume "$(pwd):/here" \
61 | --workdir /here \
62 | -e DEBSIGN_KEYID \
63 | devscripts \
64 | bash -xe /bootstrap.sh "$@"
65 |
--------------------------------------------------------------------------------
/tmux/tmux.conf:
--------------------------------------------------------------------------------
# sorah's tmux.conf
# Author: sorah (Sorah Fukumori)
# License: Public domain

#set-option -g default-command "~/.tmux.reattacher -l zsh"

# vi-style keys in copy mode
set-window-option -g mode-keys vi


# --- status line ---
set -g status-interval 5
set -g status-left-length 10
set -g status-right-length 120
# left: sync-pane indicator (external script) + hostname
set -g status-left '#[bg=colour162,fg=white]#(tmux-backtick-sync-pane.sh)#[bg=colour255,fg=black]#H'
# #[bg=colour22] #(weather.rb)
# #[bg=colour33,fg=white]#(np_itunes_mac.sh)
# right: gpg-agent status (external script), load average, clock
set -g status-right "#[bg=colour166,fg=white] #(sorah-gpg-agent-status --warn-only)[#(loadavg.sh)] %m/%d %H:%M"

set -g status-bg colour255
set -g status-fg black

#set -g renumber-windows on

# setw -g window-status-current-bg colour162
setw -g window-status-current-style bg=colour33,fg=colour255
setw -g window-status-format '#I#F #W'
setw -g window-status-current-format ' #I#F #W '

# --- terminal ---
#set -g default-terminal screen-256color
set -g default-terminal tmux-256color
setw -g xterm-keys on

set-option -g history-limit 5000

set-option -g focus-events on
# NOTE(review): 'XXX' looks like a placeholder terminal-name pattern for the
# RGB (truecolor) override — confirm it matches the intended $TERM
set-option -sa terminal-overrides ',XXX:RGB'

# prefix is C-z; host-specific prefix overrides live in ~/.tmux.prefix
set -g prefix ^Z
source-file ~/.tmux.prefix

set-option -sa escape-time 10

# splitting like my vim
bind s split-window -v
bind v split-window -h
bind h select-pane -L
bind j select-pane -D
bind k select-pane -U
bind l select-pane -R

bind [ select-window -p
bind ] select-window -n

# prefix+z sends a literal C-z to the program in the pane
unbind ^Z
bind z send-keys ^Z

unbind n
bind n new-window

unbind d
bind d detach

unbind ,
bind , previous-window
unbind .
bind . next-window

unbind A
bind A command-prompt "rename-window %%"

unbind L
bind L choose-window

unbind Q
bind Q confirm-before "kill-server"
unbind H
bind H confirm-before "kill-window"

unbind r
bind r refresh-client

unbind Escape
bind Escape copy-mode

bind-key S setw synchronize-panes
85 |
--------------------------------------------------------------------------------
/bin/sorah-i3lock-prepare:
--------------------------------------------------------------------------------
#!/bin/bash
# Prepare a lock-screen image for i3lock: pick a random picture from
# source_dir, crop/resize it to the primary display resolution, and swap it
# into place only if the result is a valid image.
set -x

source_dir=$HOME/pictures/Tumbletail/
image_path=/tmp/sorah-i3lock.png
next_image_path=/tmp/sorah-i3lock-next.png

# Pick a random non-GIF file that ImageMagick can parse; give up after 10 tries.
try=0
while [[ "$try" -lt 10 ]]; do
  # bug fix: `let try += 1` is a bash syntax error (let operands must not be
  # space-separated), so the counter never advanced and an empty or broken
  # source_dir spun forever. Count every attempt, including empty picks.
  try=$((try + 1))
  source_image="$(find "${source_dir}" -type f | grep -v 'gif$' | shuf -n 1)"
  if [[ -z "${source_image}" ]]; then continue; fi
  if identify "${source_image}"; then
    # identify via stdin so unusual filenames cannot disturb field splitting
    source_geometry="$(identify - < "${source_image}" | cut -d' ' -f 3)"
    if echo "${source_geometry}" | grep -qP '^\d+x\d+$'; then
      source_geometry_w="$(echo "${source_geometry}" | cut -d x -f 1)"
      source_geometry_h="$(echo "${source_geometry}" | cut -d x -f 2)"
      # landscape (or square) images can be resized by width and center-cropped
      if [ "${source_geometry_h}" -le "${source_geometry_w}" ]; then
        source_wide=1
      else
        source_wide=0
      fi
      break
    fi
  fi
done

mkdir -p ~/tmp
echo "${source_image}" >> ~/tmp/sorah-i3lock-image.log

# resolution of the primary output, falling back to the first connected one
export DISPLAY="${DISPLAY:-:0}"
resolution="$(xrandr | grep primary |grep -Po '\d+x\d+')"
if [ -z "${resolution}" ]; then
  resolution="$(xrandr | grep ' connected ' | head -n1 |grep -Po '\d+x\d+')"
fi
#resolution=1366x768
if [ -n "${resolution}" ]; then
  resolution_w="$(echo "${resolution}" | cut -d x -f 1)"
  resolution_h="$(echo "${resolution}" | cut -d x -f 2)"

  if [ "_${source_wide}" = "_1" ]; then
    convert "${source_image}" -resize "${resolution_w}x" +repage -gravity center -crop "${resolution}+0+0" "${next_image_path}"
  else
    # portrait: drop the top 10% before resizing so the crop keeps more of the subject
    source_offset_h="$(expr "${source_geometry_h}" / 10)"
    source_new_geometry_h="$(expr "${source_geometry_h}" - "${source_offset_h}")"
    convert "${source_image}" \
      -crop "${source_geometry_w}x${source_new_geometry_h}+0+${source_offset_h}" \
      +repage \
      -resize "${resolution_w}x" \
      -crop "${resolution}+0+0" \
      "${next_image_path}"
  fi
else
  # no X resolution detected; use the source image as-is
  # (quoted: fixes word-splitting for paths containing spaces)
  cp -v "${source_image}" "${next_image_path}"
fi

[ -e "${source_image}" ] && identify "${source_image}"
# publish the new image only if it parses as a valid image
if identify "${next_image_path}"; then
  mv -v "${next_image_path}" "${image_path}"
fi

sorah-i3lock-publish >/tmp/sorah-i3lock-publish.log 2>&1 &
64 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ec2-remove-ami:
--------------------------------------------------------------------------------
#!/usr/bin/env ruby
# Deregister AMIs and delete their backing EBS snapshots.
#
# AMIs can be given explicitly as arguments ("ami-..." or "REGION:ami-..."),
# or discovered with -n/--name or -t/--tag (TAG=VALUE). With -k/--keep N the
# newest N matches per region are kept (defaults to 3 in tag mode, else 0).
# -N/--dry-run prints the plan without calling any AWS mutation APIs.
require 'aws-sdk-ec2'
require 'optparse'

region = nil
tag, tag_value = nil, nil
keep = nil
name = nil
dry_run = false

parser = OptionParser.new do |_|
  _.on('-r REGION', '--region REGION') { |reg| region = reg }
  _.on('-n NAME', '--name NAME') { |n| name = n }
  _.on('-t TAG', '--tag TAG') { |n| tag, tag_value = n.split(?=,2) }
  _.on('-k KEEP', '--keep KEEP') { |n| keep = n.to_i }
  _.on('-N', '--dry-run') { dry_run = true }
end

args = parser.parse(ARGV)
if keep.nil?
  keep = tag ? 3 : 0
end

# lazily-created EC2 resource handle per region
ec2s = Hash.new { |h, k| h[k] = Aws::EC2::Resource.new(region: k) }

# region => list of AMI ID strings (from arguments) or Aws::EC2::Image (from search)
targets = {}

# explicit AMI-ID arguments, optionally prefixed with "REGION:"
args.each do |arg|
  values = arg.split(?:, 2)
  if values.size == 2
    ami_region, ami_id = values
  else
    ami_region, ami_id = region, values.first
  end

  (targets[ami_region] ||= []) << ami_id
end

# search mode: enumerate available AMIs by tag or name; all regions unless -r given
if name || tag
  regions = region ? [region] : Aws::EC2::Client.new(region: ENV['AWS_DEFAULT_REGION'] || 'ap-northeast-1').describe_regions.regions.map(&:region_name)

  regions.each do |target_region|
    puts "=> Finding AMI from #{target_region}"
    filter = tag ? {name: "tag:#{tag}", values: [tag_value]} : {name: 'name', values: [name]}
    ec2s[target_region].images(filters: [filter, {name: 'state', values: %w(available)}]).each do |image|
      (targets[target_region] ||= []) << image
    end
    if targets[target_region]
      # keep the newest `keep` images: sort oldest-first and drop the tail
      # NOTE(review): if explicit AMI-ID strings were also queued for this
      # region, sort_by(&:creation_date) would raise — confirm the two modes
      # are never combined per region
      if keep > 0
        targets[target_region] = targets[target_region].sort_by(&:creation_date)[0...-keep]
      end
      targets[target_region].each do |image|
        puts " * #{image.image_id} | #{image.name}"
      end
    end
  end
  puts
end

targets.each do |ami_region, images|
  ec2 = ec2s[ami_region]

  images.each do |image|
    # argument entries are ID strings; resolve them to Image resources
    image = image.is_a?(Aws::EC2::Image) ? image : ec2.image(image)

    puts "=> #{image.image_id} @ #{ami_region}"
    # snapshots backing the AMI's EBS devices, deleted after deregistering
    snapshot_ids = image.block_device_mappings.map do |mapping|
      mapping.ebs && mapping.ebs.snapshot_id
    end.compact

    puts " * Deregister #{image.image_id} (#{image.name}) @ #{ami_region}"
    image.deregister unless dry_run
    snapshot_ids.each do |snapshot_id|
      puts " * Delete snapshot #{snapshot_id} @ #{ami_region}"
      ec2.client.delete_snapshot(snapshot_id: snapshot_id) unless dry_run
    end
  end
end
79 |
80 |
--------------------------------------------------------------------------------
/bin/sorah-playing:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'json'
3 | require 'open-uri'
4 | require 'net/http'
5 | require 'pp'
6 |
7 | webhook_urls = File.read(File.expand_path('~/.sorah-playing')).each_line.map(&:chomp)
8 |
# Search the iTunes Search API for the given keywords (joined with spaces)
# and return the parsed response Hash (symbolized keys, echoed via `p`), or
# nil when the HTTP request or JSON parsing fails.
def find_its(*keywords, country: 'JP')
  query = URI.encode_www_form_component keywords.join(' ')
  # URI.open instead of bare open(): Kernel#open no longer accepts URLs on Ruby 3.0+
  p JSON.parse(URI.open("https://itunes.apple.com/search?term=#{query}&media=music&limit=1&country=#{country}", 'r', &:read), symbolize_names: true)
rescue OpenURI::HTTPError, JSON::ParserError => e
  # best-effort lookup: log and return nil so the caller can continue without metadata
  $stderr.puts "===Ignoring error from ITS==="
  $stderr.puts keywords.inspect
  $stderr.puts e.full_message
  $stderr.puts "============================="
end
18 |
# Ask iTunes (via osascript running JXA) for the currently playing track.
# Returns a Hash with :name, :artist, :album, :composer, :position (seconds)
# and :id, or an empty Hash when nothing is playing. macOS only.
def current_track
  scpt = <<-EOS
var it = Application("iTunes");
var track = it.currentTrack();
var res = "{}";
if (track && it.playerState() == "playing") {
  res = JSON.stringify({"name": track.name(), "artist": track.artist(), "album": track.album(), "composer": track.composer(), "position": it.playerPosition(), "id": track.id()});
}

res
  EOS

  # feed the script on stdin; osascript prints the JSON string on stdout
  IO.popen(["osascript", "-l", "JavaScript"], 'w+') do |io|
    io.puts scpt
    io.close_write
    JSON.parse(io.read.chomp, symbolize_names: true)
  end
end
37 |
# Poll iTunes every 15s and announce new tracks to the configured webhooks.
last = current_track
p last
sleep 15
loop do
  track = current_track
  p track
  # empty hash => nothing is playing
  unless track[:id]
    sleep 15
    next
  end
  # same track as last announced; nothing to do
  if track.fetch(:id) == last[:id]
    sleep 15
    next
  end
  # debounce: only announce once at least 15s into the track
  if track.fetch(:position) < 15
    sleep 3
    next
  end
  puts "==> Searching iTS"
  # bug fix: use the track we already fetched instead of calling current_track
  # again (the song could have changed in between); splat so each keyword is
  # passed as a separate argument
  its = find_its(*track.values_at(:name, :artist))
  # find_its returns nil on HTTP/JSON errors; treat that as "no match" instead
  # of crashing on nil.fetch
  music = its && its.fetch(:results, [])[0]
  pp music
  puts "==> Payload"
  payload = {
    text: [
      if music&.fetch(:trackViewUrl, nil)
        ":musical_note: <#{music[:trackViewUrl]}|#{track.fetch(:name)}>"
      else
        ":musical_note: #{track.fetch(:name)}"
      end,
      ":cd: #{track.fetch(:album)}",
      ":studio_microphone: #{track.fetch(:artist)}",
      if track.fetch(:composer).empty?
        nil
      else
        ":pencil2: #{track.fetch(:composer)}"
      end,
    ].compact.join(?\n),
    username: 'sorah playing',
    icon_url: music&.fetch(:artworkUrl60, nil),
  }
  pp payload
  puts "==> Posting"
  webhook_urls.each do |url|
    puts " * #{url}"
    Net::HTTP.post_form(URI(url), payload: payload.to_json)
  end
  last = track
  sleep 15
end
88 |
--------------------------------------------------------------------------------
/vim/dot.vim/doc/tags-ja:
--------------------------------------------------------------------------------
1 | !_TAG_FILE_ENCODING utf-8 //
2 | :VimProcBang vimproc.jax /*:VimProcBang*
3 | :VimProcRead vimproc.jax /*:VimProcRead*
4 | (quicklaunch-0) quicklaunch.jax /*(quicklaunch-0)*
5 | (quicklaunch-1) quicklaunch.jax /*(quicklaunch-1)*
6 | (quicklaunch-9) quicklaunch.jax /*(quicklaunch-9)*
7 | (quicklaunch-list) quicklaunch.jax /*(quicklaunch-list)*
8 | g:quicklaunch_commands quicklaunch.jax /*g:quicklaunch_commands*
9 | g:quicklaunch_no_default_key_mappings quicklaunch.jax /*g:quicklaunch_no_default_key_mappings*
10 | g:vimproc_dll_path vimproc.jax /*g:vimproc_dll_path*
11 | quicklaunch quicklaunch.jax /*quicklaunch*
12 | quicklaunch-changelog quicklaunch.jax /*quicklaunch-changelog*
13 | quicklaunch-contents quicklaunch.jax /*quicklaunch-contents*
14 | quicklaunch-interface quicklaunch.jax /*quicklaunch-interface*
15 | quicklaunch-introduction quicklaunch.jax /*quicklaunch-introduction*
16 | quicklaunch-key-mappings quicklaunch.jax /*quicklaunch-key-mappings*
17 | quicklaunch-settings quicklaunch.jax /*quicklaunch-settings*
18 | quicklaunch.txt quicklaunch.jax /*quicklaunch.txt*
19 | vimproc#fopen() vimproc.jax /*vimproc#fopen()*
20 | vimproc#get_command_name() vimproc.jax /*vimproc#get_command_name()*
21 | vimproc#get_last_errmsg() vimproc.jax /*vimproc#get_last_errmsg()*
22 | vimproc#get_last_status() vimproc.jax /*vimproc#get_last_status()*
23 | vimproc#kill() vimproc.jax /*vimproc#kill()*
24 | vimproc#open() vimproc.jax /*vimproc#open()*
25 | vimproc#pgroup_open() vimproc.jax /*vimproc#pgroup_open()*
26 | vimproc#plineopen2() vimproc.jax /*vimproc#plineopen2()*
27 | vimproc#plineopen3() vimproc.jax /*vimproc#plineopen3()*
28 | vimproc#popen2() vimproc.jax /*vimproc#popen2()*
29 | vimproc#popen3() vimproc.jax /*vimproc#popen3()*
30 | vimproc#ptyopen() vimproc.jax /*vimproc#ptyopen()*
31 | vimproc#socket_open() vimproc.jax /*vimproc#socket_open()*
32 | vimproc#system() vimproc.jax /*vimproc#system()*
33 | vimproc#system_bg() vimproc.jax /*vimproc#system_bg()*
34 | vimproc#version() vimproc.jax /*vimproc#version()*
35 | vimproc-bugs vimproc.jax /*vimproc-bugs*
36 | vimproc-changelog vimproc.jax /*vimproc-changelog*
37 | vimproc-commands vimproc.jax /*vimproc-commands*
38 | vimproc-contents vimproc.jax /*vimproc-contents*
39 | vimproc-examples vimproc.jax /*vimproc-examples*
40 | vimproc-functions vimproc.jax /*vimproc-functions*
41 | vimproc-install vimproc.jax /*vimproc-install*
42 | vimproc-interface vimproc.jax /*vimproc-interface*
43 | vimproc-introduction vimproc.jax /*vimproc-introduction*
44 | vimproc-todo vimproc.jax /*vimproc-todo*
45 | vimproc-variables vimproc.jax /*vimproc-variables*
46 | vimproc.jax vimproc.jax /*vimproc.jax*
47 |
--------------------------------------------------------------------------------
/linux/x/dot.config/fontconfig/fonts.conf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | rgb
6 |
7 |
8 |
9 | true
10 |
11 |
12 |
13 |
19 |
20 |
21 | monospace
22 | Source Code Pro
23 |
24 | Source Code Pro
25 | Migmix 1P
26 | Noto Sans CJK JP
27 | DejaVu Sans Mono
28 |
29 |
30 |
31 |
32 |
33 | serif
34 | Source Serif Pro
35 |
36 | Source Serif Pro
37 | IPAMincho
38 |
39 |
40 |
41 | sans-serif
42 | Source Sans Pro
43 |
44 | Source Sans Pro
45 | Noto Sans CJK JP
46 |
47 |
48 |
49 | urxvt
50 | Source Code Pro
51 |
52 | Source Code Pro
53 | Migmix 1P
54 | Noto Sans CJK JP
55 | DejaVu Sans Mono
56 |
57 |
58 |
59 |
60 |
61 | Courier
62 |
63 |
64 | Source Code Pro
65 |
66 |
67 |
68 |
69 |
70 | Consolas
71 |
72 |
73 | Source Code Pro
74 |
75 |
76 |
77 |
78 |
79 | Helvetica
80 |
81 |
82 | Open Sans
83 |
84 |
85 |
86 |
87 |
88 | Verdana
89 |
90 |
91 | sans-serif
92 |
93 |
94 |
95 |
--------------------------------------------------------------------------------
/claude/terraform.md:
--------------------------------------------------------------------------------
1 | #### General Conventions
2 |
3 | - Prefer data sources over hardcoded values (e.g., `data.aws_region.current`, `data.aws_caller_identity.current`)
4 | - Use locals for computed values and merging configuration
5 | - Use `jsonencode` to compose JSON objects, instead of raw string literal.
6 | - Always include common data sources in aws.tf: `data.aws_region.current`, `data.aws_caller_identity.current`, `data.aws_default_tags.current`
7 |
8 | #### Resource Naming
9 |
- Use snake_case for all resource names and variables, except the following:
  - However, the Terraform resource name should match the actual resource name in the provider: e.g., when there is an attribute like `name = "FooBar"`, use `resource "..." "FooBar"`.
12 | - IAM role and policy names use PascalCase (e.g., `Ec2Bastion`, `EcsApp`)
13 | - Use hyphenated lowercase for resource identifiers (e.g., `ec2-default`, `dns-cache`)
14 |
15 | #### File Organization
16 |
17 | **Standard File Structure**: Each Terraform directory should follow this pattern:
18 | - `aws.tf` - AWS provider configuration with standard data sources
19 | - `backend.tf` - S3 backend configuration
20 | - `versions.tf` - Provider version constraints
21 | - Resource-specific files: `vpc.tf`, `sg.tf`, `route53.tf`, `iam.tf`, etc.
22 | - Multiple IAM files: `iam_lambda.tf`, `iam_states.tf`, `iam_ec2_default.tf`
23 | - `outputs.tf` - Output definitions (when needed)
24 | - `locals.tf` - Local values (when needed)
25 |
26 | ##### Variable Definitions
27 |
28 | - Always specify `type` for variables
29 | - Use `default = {}` for optional map variables
30 | - Group related variables together with blank lines
31 |
32 | ##### Resource Arguments
33 |
34 | - Multi-line arguments should be consistently formatted
35 | - Use trailing commas in lists
36 | - Align equals signs for readability in blocks
37 |
38 |
39 | #### AWS specific instructions
40 |
41 | - Use `data.aws_iam_policy_document` whenever possible, instead of jsonencode.
42 | - Use AWS managed policies via `data.aws_iam_policy` when available
43 |
44 | **Tags and Metadata**
45 |
46 | - Use default_tags at provider level for Project and Component tags
47 | - Include meaningful resource-specific tags when needed
48 | - Use descriptive comments for non-obvious configurations
49 |
#### Resource- and data-source-specific instructions
51 |
52 | **aws_iam_role**
53 | - IAM role trust policies use separate `data.aws_iam_policy_document` with `-trust` suffix
54 | - IAM policies split into multiple documents when they get large
55 | - Use specific resource ARNs in IAM policies, avoid wildcards where possible
56 | - Role names use PascalCase (e.g., `NetKea`, `NwEc2Default`)
57 | - Include descriptive `description` field referencing the Terraform path
58 |
59 | **aws_iam_instance_profile**
60 | - Name should match the associated IAM role name
61 | - Use `aws_iam_role.Role.name` for both name and role attributes
62 |
--------------------------------------------------------------------------------
/bin/sorah-debuild-auto:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'shellwords'
3 | ARCH = ENV['ARCH'] || 'amd64'
4 | ARCH_FILTER = ENV['ARCH_FILTER']&.split(/,\s*/)
5 | DIST_FILTER = ENV['DIST_FILTER']&.split(/,\s*/)
6 | NO_MERGE = ENV['NO_MERGE']
7 | CHECK = ENV['CHECK']
8 |
# Read the source package name and version from the first line of
# debian/changelog, substituting the "~dist" placeholder with the given
# distribution codename. Returns [source_name, version_string].
def debian_version(dist)
  first_line = File.open("debian/changelog", "r", &:gets)
  source = first_line.split(' ', 2).first
  version = first_line.match(/\((.+?)\)/)[1]
  version = version.sub(/~dist/, "~#{dist}")
  # mark dots that precede another dot, end-of-string, or a "lock" suffix
  version = version.gsub(/\.(?=\.|$|lock$)/, ".#")
  [source, version]
end
16 |
# One build per (distro, arch); fields come from the --key=value options of
# the package list file.
BuildOption = Struct.new(:distro, :arch, :dist, :master, keyword_init: true)
# Parse ~/git/config/etc/debian-packages.txt — each line is
#   SOURCE [--dist=...] [--master=...] [--arch=a,b] DISTRO...
# — into { source => [BuildOption, ...] } (distros in reverse order).
config = File.read(File.expand_path('~/git/config/etc/debian-packages.txt'))
  .each_line
  .map { |line| c = line.split(/\s+/); [c[0], c[1..-1]] }
  .group_by(&:first)
  .map do |source, lines|
    [
      source,
      lines.flat_map do |(_s, options)|
        args, distros = options.partition { |_| _.start_with?('--') }
        args = args.map { |_| _.match(/^--(.+?)(?:=(.+))?$/) }.compact.map { |_| [_[1].to_sym, _[2]] }.to_h
        distros.reverse.map do |distro|
          BuildOption.new(**args.merge(distro: distro))
        end
      end
    ]
  end.to_h

# source package name from debian/control, falling back to the directory name
source = File.read('./debian/control').lines.grep(/^Source: /).first&.split(/\s+/)&.fetch(1) || File.basename(Dir.pwd)
build_options = config.fetch(source)

# def system(*a); p a; end

pp build_options
sleep 3

change_files = []

build_options.each do |opt|
  next if DIST_FILTER && !DIST_FILTER.include?(opt.distro)

  # per-dist packaging branches: check out "<dist>/<distro>" and merge master in
  if opt.dist
    master = opt.master || 'master'
    system("git", "checkout", "#{opt.dist}/#{opt.distro}", exception: true)
    # system("git", "reset", "--hard", "origin/#{args['dist']}/#{distro}") or system("git", "reset", "--hard", master, exception: true)
    unless NO_MERGE
      system("git", "merge", "--no-ff", "--no-edit", master, exception: true)
    end
  end

  debname, debver = debian_version(opt.distro)

  archs = (opt.arch || ARCH).split(?,)
  archs.each do |arch|
    next if ARCH_FILTER && !ARCH_FILTER.include?(arch)

    puts "===> #{opt.inspect} @ #{arch}"

    # skip builds whose .changes file already exists
    change_file = "debian/out/#{debname}_#{debver}_#{arch}.changes"
    change_files << change_file
    if File.exist?(change_file)
      puts "skip (#{change_file})"
      next
    end

    # native-arch builds run locally; arm64 goes through CodeBuild
    cmd = case arch
    when ARCH
      ["sorah-debuild", opt.distro, *ARGV]
    when 'arm64'
      ["sorah-debuild-codebuild", '--arm', opt.distro, *ARGV]
    else
      raise "unsupported arch #{arch.inspect}"
    end
    puts "$ #{cmd.shelljoin}"

    # CHECK mode never executes builds; it only records expected outputs
    res = CHECK ? true : system(*cmd)

    unless res
      warn "!!!!!!!!!!!!!!!!! #{opt.inspect}; failed at #{opt.distro} @ #{arch}: #{cmd.shelljoin}"
      print "hit return to continue after investigated"
      $stdin.gets
    end
  end
end

# CHECK mode: report which expected .changes files are missing
if CHECK
  puts "-------- check -------"
  ok = true
  change_files.each do |f|
    e = File.exist?(f)
    puts "* #{f} #{e ? '[ ok ]' : '[ MISSING ]'}"
    ok &&= e
  end
  exit 1 unless ok
end
102 |
--------------------------------------------------------------------------------
/debuild-docker/buildspec.yml:
--------------------------------------------------------------------------------
version: 0.2

# Build a Debian source tree (uploaded as source.tar*) inside CodeBuild and
# collect the resulting packages into out/.
# NOTE(review): buildspec v0.2 normally nests these under `env.variables:` —
# confirm CodeBuild accepts keys placed directly under `env:`.
env:
  DEBIAN_FRONTEND: 'noninteractive'
  DEBUILD_CODEBUILD_SOURCE: 'source-name'
  DEBUILD_CODEBUILD_BUILD: 'full'
  DEBUILD_CODEBUILD_DIST: ''

phases:
  install:
    commands:
      - 'useradd -m buildbot'

      # using IPv6 addrinfo registered to "localhost" might fail because containers have no IPv6
      # https://img.sorah.jp/x/20200519_171607_MSwJTE7E3T.png
      # https://github.com/moby/moby/issues/35954
      - "( sed -e '/^::1/d' /etc/hosts > /etc/hosts2 && cp /etc/hosts2 /etc/hosts ) || :"

      # Allow use of debhelper-compat 13 in backport
      - 'if grep -q bionic /etc/apt/sources.list; then printf "Package: debhelper libdebhelper-perl dh-autoreconf dwz\nPin: release a=bionic-backports\nPin-Priority: 500\n\n" | tee -a /etc/apt/preferences; fi'
      - 'if grep -q focal /etc/apt/sources.list; then printf "Package: debhelper libdebhelper-perl\nPin: release a=focal-backports\nPin-Priority: 500\n\n" | tee -a /etc/apt/preferences; fi'
      # /etc/apt/sources.list.d/debian.sources

      # prefer the in-region EC2 apt mirror for speed
      - "if [[ -e /etc/apt/sources.list.d/ubuntu.sources ]]; then sed -i -e 's|archive\\.ubuntu\\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list.d/ubuntu.sources; fi"
      - "if [[ -e /etc/apt/sources.list ]]; then sed -i -e 's|archive\\.ubuntu\\.com/ubuntu|ap-northeast-1.ec2.archive.ubuntu.com/ubuntu|g' /etc/apt/sources.list; fi"

      - apt-get update
      - apt-get install -y --no-install-recommends tzdata debhelper dh-make devscripts gnupg2 equivs lsb-release sudo
      # fakeroot may be unavailable on some bases; best-effort
      - 'apt-get install -y --no-install-recommends fakeroot || :'

  pre_build:
    commands:
      # unpack the uploaded source tree
      - 'mkdir out'
      - 'mkdir -p "${DEBUILD_CODEBUILD_SOURCE}"'
      - 'tar xf source.tar* -C "${DEBUILD_CODEBUILD_SOURCE}"'
      - 'rm -fv source.tar*'

      - 'cd "${DEBUILD_CODEBUILD_SOURCE}"'
      # default the target dist to the running distro's codename
      - 'if [ -z "${DEBUILD_CODEBUILD_DIST}" ]; then export DEBUILD_CODEBUILD_DIST=$(lsb_release -sc); fi'
      - 'dist="${DEBUILD_CODEBUILD_DIST}"'
      # substitute the "dist" placeholders in debian/changelog (all best-effort)
      - 'sed -i -e "s/_dist_/${dist}/g" debian/changelog || :'
      - 'sed -i -e "s/~dist) unstable; urgency=/~${dist}) ${dist}; urgency=/g" debian/changelog || :'
      - 'sed -i -e "s/~dist) unstable-proposed; urgency=/~${dist}) ${dist}-proposed; urgency=/g" debian/changelog || :'
      - 'sed -i -e "s/~dist)/~${dist})/g" debian/changelog || :'
      - "head -n20 debian/changelog"

      # install build dependencies declared in debian/control
      - "mk-build-deps -r -i -t 'apt-get -y -o Debug::pkgProblemResolver=yes --no-install-recommends' debian/control"
      - 'chown -R buildbot:buildbot "${CODEBUILD_SRC_DIR}"'

  build:
    # run-as: buildbot
    commands:
      - 'cd "${CODEBUILD_SRC_DIR}/${DEBUILD_CODEBUILD_SOURCE}"'
      # Case 170406024301505
      # reset SIGINT to default before exec'ing debuild
      - |
        perl -e '$SIG{"INT"} = "DEFAULT"; exec @ARGV' -- sudo -u buildbot -H debuild --no-sign --build=${DEBUILD_CODEBUILD_BUILD}
      - 'ls -la "${CODEBUILD_SRC_DIR}"'

  post_build:
    commands:
      # gather products listed in debian/files plus source artifacts into out/
      - 'cd "${CODEBUILD_SRC_DIR}"'
      - 'mkdir -p out'
      - 'cat ${DEBUILD_CODEBUILD_SOURCE}/debian/files | cut -d" " -f1 | xargs mv -v -t out/'
      - 'mv -v ./*.changes -t out/'
      - 'mv -v ./*.dsc -t out/ || :'
      - 'mv -v ./*.debian.tar* -t out/ || :'
      - 'mv -v ./*.tar* -t out/ || :'
      - 'rm out/*_orig.tar.* || :'
      - 'mkdir -p "./out/.codebuild-${CODEBUILD_BUILD_ID}"'
      - 'mv -v ./${DEBUILD_CODEBUILD_SOURCE}/debian/*.symbols -t "out/.codebuild-${CODEBUILD_BUILD_ID}" || :'

artifacts:
  base-directory: 'out'
  files:
    - '**/*'
76 |
--------------------------------------------------------------------------------
/bin/sorah-ec2-swap-root:
--------------------------------------------------------------------------------
#!/usr/bin/env ruby
# Move an EBS volume between a "target" instance (root device /dev/sda1) and
# an "inspector" instance (attached as /dev/xvdf) — e.g. to repair a broken
# root filesystem from another host. The target is stopped before any
# detach, and started again when the volume is moved back onto it.
require 'aws-sdk-ec2'

if ARGV.size < 2
  abort "Usage: #$0 target-id inspector-id [vol-id]"
end

ec2 = Aws::EC2::Client.new(region: ENV['AWS_REGION'] || ENV['AWS_DEFAULT_REGION'])

target_id, inspector_id, vol_id = ARGV[0, 3]

instances = ec2.describe_instances(instance_ids: [target_id, inspector_id]).reservations.flat_map(&:instances)

target_instance = instances.find { |_| _.instance_id == target_id }
inspector_instance = instances.find { |_| _.instance_id == inspector_id }

# default volume: the target's root device, or (if already moved over) the
# inspector's /dev/xvdf attachment
unless vol_id
  dev = target_instance.block_device_mappings.find { |_| _.device_name == '/dev/sda1' } ||
    inspector_instance.block_device_mappings.find { |_| _.device_name == '/dev/xvdf' }
  vol_id = dev.ebs.volume_id
end

# classify where the volume currently lives; drives the move direction below
vol = ec2.describe_volumes(volume_ids: [vol_id]).volumes.first
state = case
when vol.state == 'available'
  :available
when vol.attachments.any? { |_| _.instance_id == target_instance.instance_id }
  :attached_to_target
when vol.attachments.any? { |_| _.instance_id == inspector_instance.instance_id }
  :attached_to_inspector
else
  raise "unknown state"
end

puts "=> Performing on:"
puts
puts " * Volume #{vol_id} (state=#{state})"
puts " * Target #{target_instance.instance_id} (#{target_instance.tags.find{ |_| _.key == 'Name' }&.value})"
puts " * Inspector #{inspector_instance.instance_id} (#{inspector_instance.tags.find{ |_| _.key == 'Name' }&.value})"

# the target must be stopped before its root volume can be detached/attached
if target_instance.state.name == 'running'
  puts "=> Stopping target..."
  puts
  p ec2.stop_instances(instance_ids: [target_instance.instance_id])
  puts
end

if state == :attached_to_inspector
  puts "=> Detaching from inspector..."
  puts
  p ec2.detach_volume(volume_id: vol_id, instance_id: inspector_instance.instance_id)
  puts
end

puts "=> Waiting target to be stopped..." unless target_instance.state.name == 'stopped'
until target_instance.state.name == 'stopped'
  sleep 5
  target_instance = ec2.describe_instances(instance_ids: [target_id]).reservations.first.instances.first
  puts " * #{target_instance.state.name}..."
end
puts

if state == :attached_to_target
  puts "=> Detaching from target..."
  puts
  p ec2.detach_volume(volume_id: vol_id, instance_id: target_instance.instance_id)
  puts
end

# the detach above is asynchronous; poll until the volume is free
puts "=> Waiting the volume to be available..."
until vol.state == 'available'
  sleep 5
  vol = ec2.describe_volumes(volume_ids: [vol.volume_id]).volumes.first
  puts " * #{vol.state}"
end

case state
when :attached_to_target # move to inspector
  puts "=> Attaching to the inspector"

  puts
  p ec2.attach_volume(volume_id: vol_id, instance_id: inspector_instance.instance_id, device: '/dev/xvdf')
  puts

  until vol.state == 'in-use'
    sleep 5
    vol = ec2.describe_volumes(volume_ids: [vol.volume_id]).volumes.first
    puts " * #{vol.state}"
  end

when :available, :attached_to_inspector # move to target then start
  puts "=> Attaching to the target"

  puts
  p ec2.attach_volume(volume_id: vol_id, instance_id: target_instance.instance_id, device: '/dev/sda1')
  puts

  until vol.state == 'in-use'
    sleep 5
    vol = ec2.describe_volumes(volume_ids: [vol.volume_id]).volumes.first
    puts " * #{vol.state}"
  end
  puts

  # root volume restored; bring the target back up
  ec2.start_instances(instance_ids: [target_instance.instance_id])
end
107 |
--------------------------------------------------------------------------------
/win/terminal-profiles.json:
--------------------------------------------------------------------------------
1 | // To view the default settings, hold "alt" while clicking on the "Settings" button.
2 | // For documentation on these settings, see: https://aka.ms/terminal-documentation
3 | {
4 | "$schema": "https://aka.ms/terminal-profiles-schema",
5 | "defaultProfile": "{574e775e-4f2a-5b96-ac1e-a2962a402336}",
6 | "copyOnSelect": false,
7 | "copyFormatting": false,
8 | "profiles": {
9 | "defaults": {
10 | "fontSize": 10,
11 | "fontFace": "Source Code Pro",
12 | "useAcrylic": true,
13 | "acrylicOpacity": 1.0,
14 | "backgroundImage": "C:\\Users\\sorah\\Pictures\\fault\\20170528.png",
15 | "backgroundImageOpacity": 0.12,
16 | "backgroundImageStretchMode": "uniformToFill",
17 | "cursorShape": "filledBox"
18 | },
19 | "list": [
20 | {
21 | // Make changes here to the powershell.exe profile
22 | "guid": "{61c54bbd-c2c6-5271-96e7-009a87ff44bf}",
23 | "name": "Windows PowerShell",
24 | "commandline": "powershell.exe",
25 | "hidden": false
26 | },
27 | {
28 | // Make changes here to the cmd.exe profile
29 | "guid": "{0caa0dad-35be-5f56-a8ff-afceeeaa6101}",
30 | "name": "cmd",
31 | "commandline": "cmd.exe",
32 | "hidden": false
33 | },
34 | {
35 | "guid": "{574e775e-4f2a-5b96-ac1e-a2962a402336}",
36 | "hidden": false,
37 | "name": "PowerShell Core",
38 | "source": "Windows.Terminal.PowershellCore",
39 | "fontSize": 10,
40 | "fontFace": "Source Code Pro",
41 | "useAcrylic": true,
42 | "acrylicOpacity": 1.0,
43 | "backgroundImage": "C:\\Users\\sorah\\Pictures\\fault\\20170528.png",
44 | "backgroundImageOpacity": 0.12,
45 | "backgroundImageStretchMode": "uniformToFill",
46 | "cursorShape": "filledBox"
47 | },
48 | {
49 | "guid": "{b453ae62-4e3d-5e58-b989-0a998ec441b8}",
50 | "hidden": false,
51 | "name": "Azure Cloud Shell",
52 | "source": "Windows.Terminal.Azure"
53 | },
54 | {
55 | "guid": "{0a03b670-8da0-0138-2eb6-00155d830505}",
56 | "hidden": false,
57 | "name": "WSL subsystemctl shell",
58 | "commandline": "wsl subsystemctl shell",
59 | "fontSize": 10,
60 | "fontFace": "Source Code Pro",
61 | "useAcrylic": true,
62 | "acrylicOpacity": 1.0,
63 | "backgroundImage": "C:\\Users\\sorah\\Pictures\\fault\\20170528.png",
64 | "backgroundImageOpacity": 0.08,
65 | "cursorShape": "filledBox"
66 | },
67 | {
68 | "guid": "{75def201-4efb-5e32-93a0-c5647120c025}",
69 | "hidden": false,
70 | "name": "ubuntu",
71 | "source": "Windows.Terminal.Wsl"
72 | }
73 | ]
74 | }
75 | // Add custom color schemes to this array
76 | "schemes": [],
77 | // Add any keybinding overrides to this array.
78 | // To unbind a default keybinding, set the command to "unbound"
79 | "keybindings": [
80 | {
81 | "command": "copy",
82 | "keys": [
83 | "alt+c"
84 | ]
85 | },
86 | {
87 | "command": "paste",
88 | "keys": [
89 | "alt+v"
90 | ]
91 | },
92 | { "command": "find", "keys": "ctrl+shift+f" },
93 | ]
94 | }
95 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ec2-peer-vpc:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'aws-sdk-ec2'
3 |
4 | ForeignVpc = Struct.new(:vpc_id)
5 |
# Human-friendly name for a VPC: prefer the ShortName tag, then the Name tag,
# falling back to the VPC ID (also when the found tag's value is nil).
# Accepts tag-less objects such as ForeignVpc stubs, which only carry a
# vpc_id — previously those raised NoMethodError at the tagging step when
# peering with a foreign-account VPC.
def vpc_name(v)
  return v.vpc_id unless v.respond_to?(:tags)
  (v.tags.find { |_| _.key == 'ShortName' } || v.tags.find { |_| _.key == 'Name' })&.value || v.vpc_id
end
9 |
# Aws.config[:logger] = Logger.new($stderr)

default_region = ENV['AWS_REGION'] || ENV['AWS_DEFAULT_REGION'] || 'ap-northeast-1'

# ARGV[0]: requester VPC as [REGION/]VPC-ID-or-Name
# ARGV[1]: accepter VPC as [REGION/]VPC-ID-or-Name[/OWNER-ACCOUNT-ID]
# ARGV[2]: optional existing pcx ID; skips creation and only watches/accepts/tags
# suffix _i = requester side, _r = accepter side
vpc_region_i, vpc_id_i, _ = ARGV[0].split(?/, 3).yield_self { |(r,i,o)| i ? [r, i, o] : [default_region, r, o] }
vpc_region_r, vpc_id_r, vpc_owner_r = ARGV[1].split(?/, 3).yield_self { |(r,i,o)| i ? [r, i, o] : [default_region, r, o] }

ec2_i = Aws::EC2::Client.new(region: vpc_region_i)
ec2_r = Aws::EC2::Client.new(region: vpc_region_r)

# Resolve each side to a VPC: when an owner account is given, the VPC lives
# in a foreign account and is kept as an ID-only ForeignVpc stub; otherwise
# look it up by vpc-id or by its Name tag.
vpc_i, vpc_r = [[ec2_i, vpc_id_i, nil], [ec2_r, vpc_id_r, vpc_owner_r]].map do |ec2, vpc_id, owner|
  if owner
    next ForeignVpc.new(vpc_id)
  end

  case vpc_id
  when /\Avpc-[a-z0-9]+\z/
    ec2.describe_vpcs(vpc_ids: [vpc_id]).vpcs[0]
  else
    ec2.describe_vpcs(filters: [name: 'tag:Name', values: [vpc_id]]).vpcs[0]
  end
end


puts "=> VPC data"
puts
{requester: [vpc_region_i, vpc_i], accepter: [vpc_region_r, vpc_r]}.each do |k, (r,v)|
  if v.is_a?(ForeignVpc)
    puts " #{k} VPC: #{v.vpc_id}"
    puts
    next
  end
  name = vpc_name(v)

  puts " #{k} VPC:"
  puts " * Region: #{r}"
  puts " * ID: #{v.vpc_id}"
  puts " * Name: #{name}"
  puts
end

puts "=> CreateVpcPeeringConnection Request to #{vpc_region_i}, will be the following:"

req = {
  vpc_id: vpc_i.vpc_id,
  peer_vpc_id: vpc_r.vpc_id,
  peer_region: vpc_region_r,
}

# cross-account peering needs the accepter's account ID
req[:peer_owner_id] = vpc_owner_r if vpc_owner_r

puts
puts " * #{req.inspect}"
puts

# create a new peering connection unless an existing pcx ID was supplied
unless ARGV[2]
  puts "=> Are you sure to proceed?"

  print " (type 'yes'): "
  unless $stdin.gets.chomp == 'yes'
    abort "Aborted"
  end

  puts
  puts "=> Requesting:"
  puts

  res = ec2_i.create_vpc_peering_connection(req)

  puts
  puts " * #{res.inspect}"
  puts

  id = res.vpc_peering_connection.vpc_peering_connection_id
else
  id = ARGV[2]
end

puts "=> Checking for status of #{id}"

# Poll until the connection becomes active; accept it from the accepter side
# ourselves when it is within our own account (no foreign owner given).
accepted = false
loop do
  sleep 1
  begin
    conn = ec2_r.describe_vpc_peering_connections(vpc_peering_connection_ids: [id]).vpc_peering_connections[0]
  rescue Aws::EC2::Errors::InvalidVpcPeeringConnectionIDNotFound
    # cross-region propagation delay: the pcx may not be visible yet
    puts " * Waiting for the connection to appear in #{vpc_region_r}"
    sleep 2
    retry
  end

  puts " * #{conn.status.code}: #{conn.status.message.inspect}"

  if conn.status.code == 'pending-acceptance' && !accepted && !vpc_owner_r
    puts "=> Accepting pcx"

    ec2_r.accept_vpc_peering_connection(vpc_peering_connection_id: id)
    accepted = true

    puts " * Done"

    puts "=> Checking for status of #{id} ..."
  end

  if conn.status.code == 'active'
    break
  end

  if conn.status.code == 'failed'
    puts " ! Something went wrong..."
    exit 1
  end
end

# Name both ends "<requester-name>/<accepter-name>".
# NOTE(review): with a foreign accepter, vpc_r is a ForeignVpc stub without
# tags or CIDR data, so vpc_name/cidr_block_association_set below would raise
# — confirm the foreign-account flow is expected to end earlier.
puts "=> Creating tag on the pcxs"
tag = {key: 'Name', value: "#{vpc_name(vpc_i)}/#{vpc_name(vpc_r)}"}
puts " #{tag.inspect}"

{vpc_region_i => ec2_i, vpc_region_r => ec2_r}.each do |region, ec2|
  print " * #{region} ..."
  ec2.create_tags(resources: [id], tags: [tag])
  puts " [ok]"
end

# Print ready-to-paste route definitions for each side's CIDR blocks.
puts "=> All set!"
puts
{vpc_region_i => vpc_i, vpc_region_r => vpc_r}.each do |region, vpc|
  puts "# #{vpc_name(vpc)} (#{region})"
  vpc.cidr_block_association_set.each do |block|
    puts block.cidr_block
    puts "route destination_cidr_block: '#{block.cidr_block}', vpc_peering_connection_id: '#{id}' # #{vpc_name(vpc)} (#{region})"
  end
  puts
end
144 |
--------------------------------------------------------------------------------
/bin/sorah-maintain-bastion-host-pubkey:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | NAME = "nkmi-bastion"
3 | FILE = File.expand_path("~/.ssh/known_hosts")
4 | keys = {
5 | kotoha: < "
12 | echo "NAME=${name}"
13 | exit 1
14 | fi
15 |
16 | set -ex
17 | export GPG_TTY=$(tty)
18 | here="$(pwd)"
19 |
20 | out_dir=${OUT_DIR}
21 | if [[ -z $out_dir ]]; then
22 | out_dir=debian/out
23 | fi
24 |
25 | mkdir -p $out_dir
26 |
27 | if [ -n "${NO_CACHE}" ]; then
28 | NO_CACHE=--no-cache
29 | fi
30 |
31 | if [ -e debian/Dockerfile.${dist} ]; then
32 | image_tag=debuild-${name}-${dist}
33 | docker buildx build --build-arg BUILDDATE=$(date +%Y%m%d) -f debian/Dockerfile.${dist} -t "$image_tag" "${here}/debian" --load $NO_CACHE
34 | else
35 | image_tag=debuild-common-${dist}
36 | docker buildx build --build-arg BUILDDATE=$(date +%Y%m%d) -f ~/git/config/debuild-docker/Dockerfile.${dist} -t "$image_tag" --load ~/git/config/debuild-docker $NO_CACHE
37 | fi
38 |
39 | source_archive=".tmp_$$_${name}.tar.gz"
40 | git archive --format=tar --prefix "${name}-${version}/" HEAD | gzip > "${out_dir}/$source_archive"
41 |
42 | if [ -n "${version}" ]; then
43 | orig_archive="${name}_${version}.orig.tar.gz"
44 | if [ ! -e $orig_archive ]; then
45 | version_tag=$(ruby -e 'puts ARGV.first.tr("~:","_%")' "${version}")
46 | git archive --format=tar --prefix "${name}-${version}/" "upstream/${version_tag}" | gzip > "${out_dir}/$orig_archive"
47 | fi
48 | fi
49 |
50 | bootstrap_cmds=$(mktemp)
51 | cat >${bootstrap_cmds} <<-EOF
52 | useradd -u $(id -u) -m buildbot
53 | rm -fv /var/log/lastlog
54 | ln -sf /dev/null /var/log/lastlog
55 | chown buildbot:buildbot /build
56 | chown buildbot:buildbot /home/buildbot
57 | cp /run.sh /run2.sh
58 | chown buildbot:buildbot /run2.sh
59 |
60 | if [ ! -e /control ]; then
61 | mk-build-deps -r -i -t 'apt-get -y -o Debug::pkgProblemResolver=yes --no-install-recommends' /debian/control
62 | fi
63 | su buildbot -c 'bash -xe /run2.sh'
64 |
65 | echo "\$(basename -s .dsc /build/*.dsc)" > /output/.last-$$
66 | mkdir /output/.$$
67 | ls /build
68 | cat */debian/files
69 | cat */debian/files | cut -d' ' -f1 | xargs cp -p -v -t /output/
70 | cp -pv /build/*.{dsc,changes} /output/
71 | cp -pv /build/*.debian.tar* /output/ || :
72 | cp -pv /build/*.tar* /output/ || :
73 | cp -v /build/*/debian/*.symbols /output/.$$/ || :
74 | chown $(id -u):$(id -g) /output/*
75 | chown -R $(id -u):$(id -g) /output/.$$
76 | chown -R $(id -u):$(id -g) /output/.last-$$
77 | EOF
78 |
79 | cmds=$(mktemp)
80 | cat >${cmds} <<-EOF
81 | #gpg2 --import < /pub.asc
82 | #export DEBSIGN_KEYID=0x\$(gpg2 --with-colons --list-keys|grep '^sub:'|head|cut -d: -f 5)
83 |
84 | cd /build
85 | tar xf /output/${source_archive}
86 | ln -s /output/${source_archive} /build/
87 | if [ -e /output/${orig_archive} ]; then
88 | ln -s /output/${orig_archive} /build/
89 | fi
90 | mv "${name}-${version}" ${name}
91 | cd "${name}"
92 | #rm -rf debian
93 | #cp -a /debian .
94 | #rm -rf ./debian/out
95 |
96 | sed -i -e 's/_dist_/${dist}/g' debian/changelog
97 | sed -i -e 's/~dist) unstable; urgency=/~${dist}) ${dist}; urgency=/g' debian/changelog
98 | sed -i -e 's/~dist) unstable-proposed; urgency=/~${dist}) ${dist}-proposed; urgency=/g' debian/changelog
99 | sed -i -e 's/~dist)/~${dist})/g' debian/changelog
100 |
101 | debuild -us -uc
102 | EOF
103 |
104 | # --volume "${gpg_public_key}:/pub.asc" \
105 | docker run \
106 | --privileged \
107 | --net=host \
108 | --tty \
109 | --interactive \
110 | --volume "${here}/debian:/debian:ro" \
111 | --volume "${here}/${out_dir}:/output" \
112 | --volume "${cmds}:/run.sh:ro" \
113 | --volume "${bootstrap_cmds}:/bootstrap.sh:ro" \
114 | -e DEBSIGN_KEYID \
115 | -e GPGUSER \
116 | --workdir /build \
117 | "$image_tag" \
118 | bash -xe /bootstrap.sh
119 |
120 | cp -v ${out_dir}/.$$/*.symbols debian/ || :
121 |
122 | if [[ -z $NO_SIGN ]]; then
123 | sorah-debsign ${out_dir}/$(cat ${out_dir}/.last-$$)*changes
124 |
125 | tag="$(ruby -e 'dist = ARGV.first; puts File.open("debian/changelog", "r", &:gets).match(/\((.+?)\)/)[1].sub(/~dist/, "~#{dist}").gsub(/\.(?=\.|$|lock$)/, ".#").tr("~:","_%")' "${dist}")"
126 | deb_version="$(head -n1 debian/changelog|grep --only-matching -P '(?<=\().+?(?=\))'|sed -e "s/~dist/~${dist}/g")"
127 | fi
128 | if [[ -z $NO_TAG ]]; then
129 | git tag -s -m "${name} Debian release ${deb_version}" debian/${tag}
130 | git show --summary debian/${tag} | cat
131 | fi
132 |
133 |
--------------------------------------------------------------------------------
/win/fontcss.rb:
--------------------------------------------------------------------------------
1 | # override several font-family with my favorite fonts
2 |
3 | FONT_TO_OVERRIDE = [
4 | "メイリオ",
5 | "Meiryo",
6 | "Meiryo UI",
7 | "游ゴシック",
8 | "MS Pゴシック",
9 | "MS PGothic",
10 | "MS ゴシック",
11 | "MS Gothic",
12 | "MS UI Gothic",
13 | "Open Sans",
14 | "MS Pゴシック",
15 | "Hiragino Sans",
16 | "Hiragino Kaku Gothic Pro",
17 | "Hiragino Maru Gothic Pro",
18 | ]
19 | VF = %w(Noto-Sans-JP)
20 | SRC = [
21 | 'NotoSansJP',
22 | 'NotoSansCJKjp',
23 | ]
24 | WEIGHTS = {
25 | 'Thin' => 100,
26 | 'ExtraLight' => 200,
27 | 'Light' => 300,
28 | 'Regular' => 400,
29 | 'Medium' => 500,
30 | 'SemiBold' => 600,
31 | 'Bold' => 700,
32 | 'ExtraBold' => 800,
33 | 'Black' => 900,
34 | }
35 |
36 | FONT_TO_OVERRIDE.each do |font_spec|
37 | WEIGHTS.each do |psname, weight|
38 | locals = [*VF, *SRC.map { |local| "#{local}-#{psname}" }]
39 | puts <<~EOF
40 | @font-face {
41 | font-family: "#{font_spec}";
42 | font-weight: #{weight};
43 | src: #{locals.map { |local| %|local("#{local}")| }.join(', ')}
44 | }
45 | EOF
46 | end
47 | locals = [*VF, *SRC.map { |local| "#{local}-Regular" }]
48 | puts <<~EOF
49 | @font-face {
50 | font-family: "#{font_spec}";
51 | src: #{locals.map { |local| %|local("#{local}")| }.join(', ')}
52 | }
53 | EOF
54 | end
55 |
56 | puts <<~EOF
57 | @font-face {
58 | font-family: "Segoe UI";
59 | font-weight: 400;
60 | font-style: normal;
61 | src: local("Noto-Sans-JP"), local("NotoSansJP-Regular"), local("NotoSansCJKjp-Regular"), local("SourceHanSans-Regular"), local("Noto Sans CJK JP"), local("BIZ-UDPGothic"), local("YuGothic-Medium");
62 | unicode-range: U+30??, U+3400-4DB5, U+4E00-9FCB, U+F900-FA6A, U+2E80-2FD5, U+FF5F-FF9F, U+31F0-31FF, U+3220-3243, U+3280-337F, U+FF01-FF5E;
63 | }
64 |
65 | @font-face {
66 | font-family: "Segoe UI";
67 | font-weight: 200;
68 | font-style: normal;
69 | src: local("SegoeUI-Light"), local("SourceHanSans-Light"), local("Noto-Sans-JP"), local("NotoSansJP-Light"), local("NotoSansCJKjp-Light");
70 | }
71 | @font-face {
72 | font-family: "Segoe UI";
73 | font-weight: 300;
74 | font-style: normal;
75 | src: local("SegoeUI-Semilight"), local("SourceHanSans-Semilight"), local("Noto-Sans-JP"), local("NotoSansJP-Medium"), local("NotoSansCJKjp-Medium");
76 | }
77 | @font-face {
78 | font-family: "Segoe UI";
79 | font-weight: 400;
80 | font-style: normal;
81 | src: local("SegoeUI"), local("SourceHanSans-Regular"), local("Noto-Sans-JP"), local("NotoSansJP-Regular"), local("NotoSansCJKjp-Regular");
82 | }
83 | @font-face {
84 | font-family: "Segoe UI";
85 | font-weight: 600;
86 | font-style: normal;
87 | src: local("SegoeUI-Semibold"), local("SourceHanSans-SemiBold"), local("Noto-Sans-JP"), local("NotoSansJP-SemiBold"), local("NotoSansCJKjp-SemiBold");
88 | }
89 | @font-face {
90 | font-family: "Segoe UI";
91 | font-weight: 700;
92 | font-style: normal;
  93 |     src: local("SegoeUI-Bold"), local("SourceHanSans-Bold"), local("Noto-Sans-JP"), local("NotoSansJP-Bold"), local("NotoSansCJKjp-Bold");
94 | }
95 |
96 | @font-face {
97 | font-family: "Segoe UI";
98 | font-weight: 200;
99 | font-style: italic;
100 | src: local("SegoeUI-LightItalic"), local("SourceHanSans-Light"), local("Noto-Sans-JP"), local("NotoSansJP-Light"), local("NotoSansCJKjp-Light");
101 | }
102 | @font-face {
103 | font-family: "Segoe UI";
104 | font-weight: 300;
105 | font-style: italic;
106 | src: local("SegoeUI-SemilightItalic"), local("SourceHanSans-Semilight"), local("Noto-Sans-JP"), local("NotoSansJP-Medium"), local("NotoSansCJKjp-Medium");
107 | }
108 | @font-face {
109 | font-family: "Segoe UI";
110 | font-weight: 400;
111 | font-style: italic;
112 | src: local("SegoeUI-Italic"), local("SourceHanSans-Regular"), local("Noto-Sans-JP"), local("NotoSansJP-Regular"), local("NotoSansCJKjp-Regular");
113 | }
114 | @font-face {
115 | font-family: "Segoe UI";
116 | font-weight: 600;
117 | font-style: italic;
118 | src: local("SegoeUI-SemiboldItalic"), local("SourceHanSans-SemiBold"), local("Noto-Sans-JP"), local("NotoSansJP-SemiBold"), local("NotoSansCJKjp-SemiBold");
119 | }
120 | @font-face {
121 | font-family: "Segoe UI";
122 | font-weight: 700;
123 | font-style: italic;
  124 |     src: local("SegoeUI-BoldItalic"), local("SourceHanSans-Bold"), local("Noto-Sans-JP"), local("NotoSansJP-Bold"), local("NotoSansCJKjp-Bold");
125 | }
126 | EOF
127 |
--------------------------------------------------------------------------------
/memo/avsystem.dot:
--------------------------------------------------------------------------------
1 | digraph {
2 | newrank=true;
3 | fontname="Lato";
4 | node [fontname="Lato"];
5 | subgraph cluster_source_pc {
6 | label="PC";
7 | subgraph cluster_source_pc_gpu {
8 | label="gpu";
9 | pcHDMI [label="HDMI"];
10 | pcDVIa [label="DVI 1"];
11 | pcDVIb [label="DVI 2"];
12 |
13 | }
14 | subgraph cluster_source_pc_amp {
15 | label="AT-HA40USB";
16 | pcAmp [label="in"];
17 | pcAmpSPDIF [label="Line Out (S/PDIF)"];
18 | pcAmpLINE [label="Line Out"];
19 | pcAmpOut [label="Headphone Out"];
20 | }
21 | }
22 | subgraph cluster_source_mb {
23 | label="MacBook";
24 | mbDP [label="DP"];
25 | mbUSBmix [label="Mixer USB"];
26 | }
27 | {
28 | rank=same;
29 | pcHDMI; pcDVIa; pcDVIb; pcAmpSPDIF; pcAmpLINE; pcAmpOut; mbDP; mbUSBmix;
30 | switch [label="Switch"];
31 | ps4 [label="PS4"];
32 | ps3 [label="PS3"];
33 | chrome [label="Chromecast"];
34 | apple [label="Apple TV"];
35 | }
36 | subgraph cluster_amp {
37 | label="amp";
38 | subgraph cluster_amp_in {
39 | label="in";
40 | subgraph cluster_amp_in_ps4 {
41 | label="PS4";
42 | ampHDMIbd [label="HDMI (BD)"]
43 | }
44 | subgraph cluster_amp_in_switch {
45 | label="Switch";
46 | ampHDMIdvd [label="HDMI (DVD)"];
47 | }
48 | subgraph cluster_amp_in_pc {
49 | label="PC";
50 | ampHDMIgame [label="HDMI (GAME)"];
51 | ampSPDIFa [label="S/PDIF 1"];
52 | }
53 | subgraph cluster_amp_in_chrome {
54 | label="Chromecast";
55 | ampHDMIv4 [label="HDMI (VIDEO 4)"];
56 | }
57 | subgraph cluster_amp_in_apple {
58 | label="Apple TV";
59 | ampHDMIv3 [label="HDMI (VIDEO 3)"];
60 | }
61 | subgraph cluster_amp_in_ps3 {
62 | label="PS3";
63 | ampHDMIcd [label="HDMI (SA-CD/CD)"]
64 | }
65 | subgraph cluster_amp_in_front {
66 | label="front";
67 | ampRCAv2 [label="RCA (VIDEO 2)"];
68 | }
69 | }
70 | subgraph cluster_amp_out {
71 | label="out";
72 | rank=same;
73 | ampoutHDMIa [label="HDMI A"];
74 | ampoutHDMIb [label="HDMI B"];
75 | ampoutSpeaker [label="Speaker Out"];
76 | ampoutHeadphone [label="Headphone Out"];
77 | }
78 |
79 | amp
80 |
81 | }
82 | subgraph cluster_mix {
83 | label="Audio Mixer";
84 | {
85 | rank=same;
86 | mixInMic1 [label="In 1 (Mic)"];
87 | mixIn23 [label="In 2/3"];
88 | mixIn45 [label="In 4/5"];
89 | }
90 | {
91 | rank=same;
92 | mix [label="Mixer"];
93 | mixTrack2 [label="Track 2"];
94 | }
95 | {
96 | rank=same;
97 | mixPhones [label="Phones"];
98 | mixOut [label="Main Out"];
99 | }
100 | }
101 |
102 | {rank=same; amp; mix; mixTrack2; }
103 | {
104 | rank=same;
105 | ampHDMIbd; ampHDMIdvd; ampHDMIgame; ampSPDIFa; ampHDMIv4; ampHDMIv3; ampHDMIcd; ampRCAv2;
106 | mixInMic1; mixIn23; mixIn45;
107 | }
108 | subgraph cluster_mon {
109 | subgraph cluster_mon0 {
110 | label="21\"HD";
111 | mon21hdmi [label="HDMI"];
112 | mon21dvi [label="DVI"];
113 | }
114 | subgraph cluster_mon1 {
115 | label="27\"WQHD";
116 | mon27dvi [label="DVI"];
117 | mon27dp [label="DP"];
118 | }
119 | subgraph cluster_mon2 {
120 |
121 | label="24\"HD";
122 | mon24hdmi [label="HDMI"];
123 | }
124 | }
125 | subgraph cluster_snd {
126 | sndHeadset [label="Headset (in)"];
127 | sndSpeaker [label="Speaker"];
128 | }
129 |
130 | {
131 | rank=same;
132 | mon21hdmi; mon21dvi; mon27dvi; mon27dp; mon24hdmi; sndHeadset; sndSpeaker;
133 | }
134 |
135 |
136 | {
137 | # Video/Audio
138 | edge [style=bold];
139 | ps4 -> ampHDMIbd;
140 | switch -> ampHDMIdvd;
141 | chrome -> ampHDMIv4;
142 | apple -> ampHDMIv3;
143 | ps3 -> ampHDMIcd;
144 |
145 | ampHDMIv4 -> amp;
146 | ampHDMIv3 -> amp;
147 | ampHDMIcd -> amp;
148 | ampHDMIbd -> amp;
149 | ampHDMIdvd -> amp;
150 |
151 | pcHDMI -> ampHDMIgame;
152 | }
153 | {
154 | # Video
155 | edge [color="#008899"];
156 | pcDVIa -> mon21dvi;
157 | ampoutHDMIa -> mon21hdmi;
158 | pcDVIb -> mon27dvi;
159 | mbDP -> mon27dp;
160 | ampoutHDMIb -> mon24hdmi;
161 |
162 | ampHDMIgame -> amp;
163 |
164 | amp -> ampoutHDMIa;
165 | amp -> ampoutHDMIb;
166 | }
167 | {
168 | # Audio
169 | edge [color="#bb0000"];
170 | ampoutSpeaker -> sndSpeaker;
171 |
172 | pcAmp -> pcAmpLINE;
173 | pcAmp -> pcAmpSPDIF;
174 | pcAmp -> pcAmpOut;
175 | pcAmpLINE -> mixIn45;
176 | pcAmpSPDIF -> ampSPDIFa;
177 |
178 | ampoutHeadphone -> mixIn23 [style=dashed];
179 |
180 | ampRCAv2 -> amp;
181 | ampSPDIFa -> amp;
182 | amp -> ampoutSpeaker;
183 | amp -> ampoutHeadphone;
184 |
185 | mbUSBmix -> mixTrack2;
186 | mixIn23 -> mix;
187 | mixIn45 -> mix;
188 | mixInMic1 -> mix;
189 | mix -> mixOut;
190 | mixOut -> mixPhones;
191 | mixTrack2 -> mixOut;
192 | mix -> mbUSBmix;
193 | mixPhones -> sndHeadset;
194 | mixOut -> ampRCAv2;
195 | }
196 | }
197 |
--------------------------------------------------------------------------------
/bin/sorah-debuild-codebuild:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | REGION = 'us-west-2'
3 | PROJECT = 'sorah-debuild'
4 |
5 | BUCKET = 'sorah-codebuild'
6 | SOURCE_PREFIX = 'sources/'
7 |
8 | BUILDSPEC = File.read(File.join(__dir__, '..', 'debuild-docker', 'buildspec.yml'))
9 |
10 | require 'uri'
11 | require 'fileutils'
12 | require 'aws-sdk-s3'
13 | require 'aws-sdk-codebuild'
14 | require 'optparse'
15 |
16 |
17 | if ARGV.size < 1
18 | abort "usage: #$0 docker_repo:distro [upstream_version] [--arm] [--debian-ref=DEBIAN_TAG]"
19 | end
20 |
21 | arm = false
22 | debian_ref = 'HEAD'
23 |
24 | argv = OptionParser.new do |o|
25 | o.on('-a', "--arm", "Use ARM_CONTAINER") do
26 | arm = true
27 | end
  28 |   o.on('-d', "--debian-ref REF", "Use specified git ref for a debian source (default=HEAD)") do |r|
29 | debian_ref = r
30 | end
31 | end.parse(ARGV)
32 |
33 | image, upstream_version = argv[0,2]
34 |
35 | unless image.include?(':')
36 | repo = case image
37 | when 'stretch', 'buster', 'bullseye', 'bookworm', 'trixie'
38 | 'public.ecr.aws/debian/debian'
39 | when 'trusty', 'xenial', 'bionic', 'focal', 'jammy', 'noble'
40 | 'public.ecr.aws/ubuntu/ubuntu'
41 | else
42 | raise "Unknown shorthand to determine Docker image repository: #{image}"
43 | end
44 | image = "#{repo}:#{image}"
45 | end
46 |
47 | distro = image.split(?:,2)[1]
48 |
49 | puts "===> PID: #$$"
50 |
51 | @s3 = Aws::S3::Client.new(region: REGION)
52 | @codebuild = Aws::CodeBuild::Client.new(region: REGION)
53 |
54 | unless File.exist?('debian')
55 | abort "Run in debian package directory root"
56 | end
57 |
58 | source_zip_directory = "debian/out/codebuild-src-#$$"
59 | source_name = File.read('debian/control').each_line.lazy.map { |_| _.match(/^Source:\s+(.+?)$/) }.find(&:itself)[1]
60 |
61 | FileUtils.mkdir_p(source_zip_directory)
62 |
63 | puts "===> Archiving #{debian_ref}"
64 | File.open(File.join(source_zip_directory, 'source.tar'), 'wb') do |io|
65 | system("git", "archive", "--format=tar", debian_ref, out: io, exception: true)
66 | end
67 |
68 | if upstream_version
69 | orig_name = "#{source_name}_#{upstream_version}.orig.tar.gz"
70 | unless File.exist?(File.join('debian/out', orig_name))
71 | upstream_version_tag = "upstream/#{upstream_version.tr('~:', '_%')}"
72 | puts "===> Archiving #{upstream_version_tag}"
73 | File.open(File.join('debian/out', orig_name), 'wb') do |f|
74 | IO.popen(["git", "archive", "--format=tar", "--prefix", "#{source_name}-#{upstream_version}/", upstream_version_tag], 'rb') do |io|
75 | system("gzip", in: io, out: f, exception: true)
76 | end
77 | raise unless $?.success?
78 | end
79 | end
80 | FileUtils.cp File.join('debian/out', orig_name), File.join(source_zip_directory, orig_name)
81 | end
82 |
83 | puts "===> zip source directory"
84 |
85 | zip_path = File.expand_path("./debian/out/codebuild-src-#$$.zip")
86 | Dir.chdir(source_zip_directory) do
87 | system("zip", "-r", zip_path, '.', exception: true)
88 | end
89 |
90 | puts "===> uploading source"
91 |
92 | source_key = "#{SOURCE_PREFIX}#{PROJECT}.zip"
93 | puts " * Bucket: #{BUCKET}"
94 | puts " * Key: #{source_key}"
95 |
96 | source_version = File.open(zip_path, 'rb') do |io|
97 | @s3.put_object(
98 | bucket: BUCKET,
99 | key: source_key,
100 | body: io,
101 | ).version_id
102 | end
103 |
104 | puts " [ ok ] version=#{source_version}"
105 |
106 | File.unlink zip_path
107 | FileUtils.remove_entry_secure source_zip_directory
108 |
109 | puts
110 | puts "===> Starting build"
111 |
112 | build = @codebuild.start_build(
113 | project_name: PROJECT,
114 | source_version: source_version,
115 | environment_variables_override: [
116 | { type: 'PLAINTEXT', name: 'DEBIAN_FRONTEND', value: 'noninteractive' },
117 | { type: 'PLAINTEXT', name: 'DEBUILD_CODEBUILD_SOURCE', value: source_name },
118 | { type: 'PLAINTEXT', name: 'DEBUILD_CODEBUILD_BUILD', value: arm ? 'any' : 'full' },
119 | { type: 'PLAINTEXT', name: 'DEBUILD_CODEBUILD_DIST', value: distro },
120 | ],
121 | compute_type_override: 'BUILD_GENERAL1_LARGE',
122 | environment_type_override: arm ? 'ARM_CONTAINER' : 'LINUX_CONTAINER',
123 | image_override: image,
124 | buildspec_override: BUILDSPEC,
125 | idempotency_token: "#{File.basename($0)}-#{ENV['USER']}-#$$",
126 | ).build
127 |
128 | puts " * ARN: #{build.arn}"
129 | puts " * Log: https://console.aws.amazon.com/codesuite/codebuild/projects/#{URI.encode_www_form_component(build.project_name)}/build/#{URI.encode_www_form_component(build.id)}/log?region=#{REGION}"
130 |
131 | sleep 2
132 | puts
133 | puts "===> Waiting build to complete..."
134 |
135 | loop do
136 | build = @codebuild.batch_get_builds(ids: [build.id]).builds[0]
137 | if build
138 | puts " * status: #{build.build_status}, phase: #{build.current_phase}"
139 | break if build.build_status != 'IN_PROGRESS'
140 | else
141 | puts " * build not found"
142 | end
143 | sleep 5
144 | end
145 |
146 | unless build.build_status == 'SUCCEEDED'
147 | raise "build not succeeded"
148 | end
149 |
150 | puts
151 | puts "===> Downloading artifacts"
152 |
153 | File.open("./debian/out/codebuild-out-#$$.zip", 'wb') do |io|
154 | m = build.artifacts.location.match(%r{\Aarn:aws:s3:::(.+?)/(.+?)\z})
155 | unless m
156 | raise "artifact location not supported"
157 | end
158 | @s3.get_object(
159 | bucket: m[1],
160 | key: m[2],
161 | response_target: io,
162 | )
163 | end
164 |
165 | puts
166 | puts "===> Unzip and debsign"
167 |
168 | FileUtils.mkdir_p("debian/out/codebuild-out-#$$")
169 | Dir.chdir("debian/out/codebuild-out-#$$") do
170 | system("unzip", "../codebuild-out-#$$.zip", exception: true)
171 | File.unlink "../codebuild-out-#$$.zip"
172 | Dir['./*.changes'].each do |changes|
173 | system("sorah-debsign", changes, exception: true)
174 | end
175 | end
176 |
177 | (Dir["./debian/out/codebuild-out-#$$/*"] + Dir["./debian/out/codebuild-out-#$$/.*"]).each do |file|
178 | next if file.end_with?('/.') || file.end_with?('/..')
179 | File.rename file, "./debian/out/#{File.basename(file)}"
180 | end
181 |
182 | Dir.rmdir "./debian/out/codebuild-out-#$$"
183 |
--------------------------------------------------------------------------------
/linux/x/i3/config:
--------------------------------------------------------------------------------
1 | # This file has been auto-generated by i3-config-wizard(1).
2 | # It will not be overwritten, so edit it as you like.
3 | #
4 | # Should you change your keyboard layout some time, delete
5 | # this file and re-run i3-config-wizard(1).
6 |
7 | exec --no-startup-id picom
8 | exec --no-startup-id xss-lock --transfer-sleep-lock -- sorah-i3lock -n
9 | exec --no-startup-id dunst
10 |
11 | # i3 config file (v4)
12 | #
13 | # Please see http://i3wm.org/docs/userguide.html for a complete reference!
14 |
15 | set $mod Mod4
16 |
17 | # Font for window titles. Will also be used by the bar unless a different font
18 | # is used in the bar {} block below.
19 | font pango:OpenSans 8
20 |
21 | # This font is widely installed, provides lots of unicode glyphs, right-to-left
22 | # text rendering and scalability on retina/hidpi displays (thanks to pango).
23 | #font pango:DejaVu Sans Mono 8
24 |
25 | # Before i3 v4.8, we used to recommend this one as the default:
26 | # font -misc-fixed-medium-r-normal--13-120-75-75-C-70-iso10646-1
27 | # The font above is very space-efficient, that is, it looks good, sharp and
28 | # clear in small sizes. However, its unicode glyph coverage is limited, the old
29 | # X core fonts rendering does not support right-to-left and this being a bitmap
30 | # font, it doesn’t scale on retina/hidpi displays.
31 |
32 | # Use Mouse+$mod to drag floating windows to their wanted position
33 | floating_modifier $mod
34 |
35 | # start a terminal
36 | bindsym $mod+Return exec env TERMINAL=alacritty i3-sensible-terminal
37 | bindsym $mod+Shift+Return exec alacritty -e bash
38 |
39 | # kill focused window
40 | bindsym $mod+Shift+q kill
41 |
42 | # start dmenu (a program launcher)
43 | bindsym $mod+d exec --no-startup-id i3-dmenu-desktop
44 |
45 | bindsym --release $mod+Print exec --no-startup-id "( sorah-gyazo | xargs xdg-open ) >/tmp/gyazo.log 2>&1"
46 | bindsym --release $mod+P exec --no-startup-id "( sorah-gyazo | xargs xdg-open ) >/tmp/gyazo.log 2>&1"
47 | bindsym --release $mod+Shift+P exec --no-startup-id "( sorah-gyazo2 | xargs xdg-open ) >/tmp/gyazo.log 2>&1"
48 | # There also is the (new) i3-dmenu-desktop which only displays applications
49 | # shipping a .desktop file. It is a wrapper around dmenu, so you need that
50 | # installed.
51 |
52 | # change focus
53 | bindsym $mod+h focus left
54 | bindsym $mod+j focus down
55 | bindsym $mod+k focus up
56 | bindsym $mod+l focus right
57 |
58 | # move focused window
59 | bindsym $mod+Shift+h move left
60 | bindsym $mod+Shift+j move down
61 | bindsym $mod+Shift+k move up
62 | bindsym $mod+Shift+l move right
63 |
64 | # alternatively, you can use the cursor keys:
65 | bindsym $mod+Shift+Left move left
66 | bindsym $mod+Shift+Down move down
67 | bindsym $mod+Shift+Up move up
68 | bindsym $mod+Shift+Right move right
69 |
70 | # split in horizontal orientation
71 | # bindsym $mod+h split h
72 | bindsym $mod+- split h
73 |
74 | # split in vertical orientation
75 | bindsym $mod+v split v
76 |
77 | # enter fullscreen mode for the focused container
78 | bindsym $mod+f fullscreen toggle
79 |
80 | # change container layout (stacked, tabbed, toggle split)
81 | bindsym $mod+s layout stacking
82 | bindsym $mod+w layout tabbed
83 | bindsym $mod+e layout toggle split
84 |
85 | # toggle tiling / floating
86 | bindsym $mod+Shift+space floating toggle
87 |
88 | # change focus between tiling / floating windows
89 | bindsym $mod+space focus mode_toggle
90 |
91 | # focus the parent container
92 | bindsym $mod+a focus parent
93 |
94 | # focus the child container
95 | #bindsym $mod+d focus child
96 |
97 | # switch to workspace
98 | bindsym $mod+1 workspace 1
99 | bindsym $mod+2 workspace 2
100 | bindsym $mod+3 workspace 3
101 | bindsym $mod+4 workspace 4
102 | bindsym $mod+5 workspace 5
103 | bindsym $mod+6 workspace 6
104 | bindsym $mod+7 workspace 7
105 | bindsym $mod+8 workspace 8
106 | bindsym $mod+9 workspace 9
107 | bindsym $mod+0 workspace 10
108 |
109 | # move focused container to workspace
110 | bindsym $mod+Shift+1 move container to workspace 1
111 | bindsym $mod+Shift+2 move container to workspace 2
112 | bindsym $mod+Shift+3 move container to workspace 3
113 | bindsym $mod+Shift+4 move container to workspace 4
114 | bindsym $mod+Shift+5 move container to workspace 5
115 | bindsym $mod+Shift+6 move container to workspace 6
116 | bindsym $mod+Shift+7 move container to workspace 7
117 | bindsym $mod+Shift+8 move container to workspace 8
118 | bindsym $mod+Shift+9 move container to workspace 9
119 | bindsym $mod+Shift+0 move container to workspace 10
120 |
121 | # reload the configuration file
122 | bindsym $mod+Shift+c reload
123 | # restart i3 inplace (preserves your layout/session, can be used to upgrade i3)
124 | bindsym $mod+Shift+r restart
125 | # exit i3 (logs you out of your X session)
126 | bindsym $mod+Shift+e exec "i3-nagbar -t warning -m 'You pressed the exit shortcut. Do you really want to exit i3? This will end your X session.' -b 'Yes, exit i3' 'i3-msg exit'"
127 |
128 | # resize window (you can also use the mouse for that)
129 | mode "resize" {
130 | # These bindings trigger as soon as you enter the resize mode
131 |
132 | # Pressing left will shrink the window’s width.
133 | # Pressing right will grow the window’s width.
134 | # Pressing up will shrink the window’s height.
135 | # Pressing down will grow the window’s height.
136 | bindsym j resize shrink width 10 px or 10 ppt
137 | bindsym k resize grow height 10 px or 10 ppt
138 | bindsym l resize shrink height 10 px or 10 ppt
139 | bindsym semicolon resize grow width 10 px or 10 ppt
140 |
141 | # same bindings, but for the arrow keys
142 | bindsym Left resize shrink width 10 px or 10 ppt
143 | bindsym Down resize grow height 10 px or 10 ppt
144 | bindsym Up resize shrink height 10 px or 10 ppt
145 | bindsym Right resize grow width 10 px or 10 ppt
146 |
147 | # back to normal: Enter or Escape
148 | bindsym Return mode "default"
149 | bindsym Escape mode "default"
150 | }
151 |
152 | bindsym $mod+r mode "resize"
153 |
154 | bindsym $mod+Mod1+l exec loginctl lock-session
155 |
156 | # Start i3bar to display a workspace bar (plus the system information i3status
157 | # finds out, if available)
158 | bar {
159 | status_command i3status --config ~/.i3/status.conf
160 | position bottom
161 | colors {
162 | background #000000FF
163 | }
164 | }
165 |
166 |
--------------------------------------------------------------------------------
/script/wakeup/wakeup.rb:
--------------------------------------------------------------------------------
1 | require 'logger'
2 | require 'socket'
3 | require 'thread'
4 | require 'json'
5 | require 'aws-sdk'
6 | require 'fluent-logger'
7 |
8 | @noop = ENV['NOOP'] == '1'
9 |
10 | $stdout.sync = true
11 |
12 | AWS_REGION = 'ap-northeast-1'
13 | AWS_PROFILE = 'cron-wakeup'
14 | CURRENT_TXT_S3_BUCKET = 'sorah-userland'
15 | CURRENT_TXT_S3_KEY = 'wakeup/current.txt'
16 |
17 | RESULT_S3_BUCKET = 'sorah-userland'
18 | RESULT_S3_PREFIX = 'wakeup/result'
19 |
20 | SQS_QUEUE_NAME = 'sorah-wakeup'
21 |
22 | LOGS_LOG_GROUP = 'sorah-wakeup'
23 | LOGS_LOG_STREAM = 'log'
24 |
25 | def tweet(*texts)
26 | Thread.new do
27 | begin
28 | logger = Fluent::Logger::FluentLogger.new('twitter', host: 'boston.her', port: 19248)
29 | texts.each do |text|
30 | puts "TWEET: #{text}"
31 | logger.post('sorah-wakeup', message: text)
32 | end
33 | rescue Exception => e
34 | warn "Error while tweeting: #{e.inspect}\n\t#{e.backtrace.join("\n\t")}"
35 | end
36 | end
37 | end
38 |
39 | def latest_sequence_token
40 | @logs_lock.synchronize do
41 | @logs.describe_log_streams(log_group_name: LOGS_LOG_GROUP, log_stream_name_prefix: LOGS_LOG_STREAM).log_streams.find { |_| _.log_stream_name == LOGS_LOG_STREAM }.upload_sequence_token
42 | end
43 | end
44 |
45 | def publish_log(message_id, obj)
46 | publish_log_cloudwatch(obj)
47 | publish_log_s3(message_id, obj)
48 | end
49 |
50 | def publish_log_cloudwatch(obj)
51 | msg = obj.to_json
52 | @logs_lock.synchronize do
53 | puts "LOG: #{msg}"
54 | begin
55 | resp = @logs.put_log_events(
56 | log_group_name: LOGS_LOG_GROUP,
57 | log_stream_name: LOGS_LOG_STREAM,
58 | sequence_token: @logs_sequence_token,
59 | log_events: [
60 | {timestamp: (Time.now.to_f * 1000).to_i, message: msg},
61 | ]
62 | )
63 | rescue Exception => e
64 | warn "Error while publishing log to CloudWatchLogs: #{e.inspect}\n\t#{e.backtrace.join("\n\t")}"
65 | return
66 | end
67 |
68 | @logs_sequence_token = resp.next_sequence_token
69 | end
70 | end
71 |
72 | def publish_log_s3(msgid, obj)
73 | @s3.put_object(bucket: RESULT_S3_BUCKET, key: "#{RESULT_S3_PREFIX}/#{msgid}", content_type: 'application/json', body: obj.to_json)
74 | rescue Exception => e
75 | warn "Error while publishing log to S3: #{e.inspect}\n\t#{e.backtrace.join("\n\t")}"
76 | end
77 |
78 | def nw_locate
79 | `sorah-nw-simple-locate`.chomp
80 | end
81 |
82 | def current_track
83 | if @noop
84 | return 32.times.map { ('a'..'z').to_a.sample }.join
85 | end
86 |
87 | scpt = <<-EOS
88 | var it = Application("iTunes");
89 | var track = it.currentTrack();
90 | var res = "Nothing playing";
91 | if (track && it.playerState() == "playing") {
92 | res = track.name() + " - " + track.artist() + " (vol=" + Application("iTunes").soundVolume() + ")";
93 | }
94 |
95 | res
96 | EOS
97 |
98 | IO.popen(["osascript", "-l", "JavaScript"], 'w+') do |io|
99 | io.puts scpt
100 | io.close_write
101 | io.read.chomp
102 | end
103 | end
104 |
# Start playback at full volume. If nothing is playing, or the current
# playlist is not one of the allowed ones ("Music" / "__Smart"), switch to
# the "Music" playlist and force shuffle on via the menu bar (clicked
# through System Events because this JXA path has no direct shuffle API).
# No-op in noop mode.
def alarm!
  return if @noop
  scpt = <<-EOS
    var it = Application("iTunes");
    var state = it.playerState()

    var shouldChangePlaylist = state != "playing" || (it.currentPlaylist().name() != "Music" && it.currentPlaylist().name() != "__Smart");

    if (shouldChangePlaylist) {
      it.playlists["Music"].play();

      var shuffleMenu = Application("System Events").processes['iTunes'].menuBars[0].menuBarItems['Controls'].menus[0].menuItems['Shuffle'].menus[0];
      shuffleMenu.menuItems['On'].click();
      shuffleMenu.menuItems['Songs'].click();
    } else {
      it.play();
    }

    it.soundVolume = 100;
  EOS

  # Fire-and-forget: output is read only to drain the pipe.
  IO.popen(["osascript", "-l", "JavaScript"], 'w+') do |io|
    io.puts scpt
    io.close_write
    io.read
  end
end
132 |
# Perform a full wake-up: kick playback, report the track now playing,
# sync it to S3, and tweet mentions tagged with the run id.
# Returns the track description.
def wakeup!(run_id: Time.now.to_s)
  alarm!

  track = current_track
  puts "Woke up, playing: #{track}"
  update_current_track(track)

  tweet "@sorahers #sorah_wakeup #{run_id}", "@sora_h #sorah_wakeup #{run_id}"

  track
end
144 |
# Upload the current track name to S3, but only when it changed since the
# last upload or the last upload is older than 120 seconds (heartbeat).
# Returns the S3 response when an upload happened, nil otherwise.
def update_current_track(cur = current_track)
  changed = !@last_current_track_remote || @last_current_track_remote != cur
  stale = @last_current_track_time && (Time.now - @last_current_track_time) > 120
  return nil unless changed || stale

  resp = @s3.put_object(bucket: CURRENT_TXT_S3_BUCKET, key: CURRENT_TXT_S3_KEY, body: "#{cur}\n", content_type: 'text/plain')
  @last_current_track_time = Time.now
  @last_current_track_remote = cur
  puts "CURRENT PUT: #{cur}"
  resp
end
156 |
# Guard: this agent only makes sense on the americano host; elsewhere idle
# forever so a supervisor does not restart-loop the process.
unless %w(americano americano-wlan.home.her americano.home.her americano.local).include?(Socket.gethostname)
  puts "This host (#{Socket.gethostname}) is not expected to run"
  loop { sleep 3600 }
end

profile = Aws::SharedCredentials.new(profile_name: AWS_PROFILE)

@sqs = Aws::SQS::Client.new(credentials: profile, region: AWS_REGION)

@s3 = Aws::S3::Client.new(credentials: profile, region: AWS_REGION)
@last_current_track_remote = nil
@last_current_track_time = nil

@logs_lock = Mutex.new
@logs = Aws::CloudWatchLogs::Client.new(credentials: profile, region: AWS_REGION)
@logs_sequence_token = latest_sequence_token

url = @sqs.get_queue_url(queue_name: SQS_QUEUE_NAME).queue_url

# Background consumer: on each SQS message, wake up if the machine is at
# home and publish an ack/skip/error record for the sender to observe.
# abort_on_exception ensures a crashed poller kills the whole process.
Thread.new do
  poller = Aws::SQS::QueuePoller.new(url, client: @sqs)
  poller.poll do |msg|
    puts "Received a message: #{msg.inspect}"
    begin
      if nw_locate == 'home'
        puts "Waking up..."
        track = wakeup!(run_id: msg.message_id)
        publish_log(msg.message_id, {kind: 'ack', message_id: msg.message_id, track: track})
      else
        puts "Not in home, skipping"
        publish_log(msg.message_id, {kind: 'skip', message_id: msg.message_id, reason: 'agent not in home'})
      end
    rescue Exception => e
      # Log and re-raise so abort_on_exception still terminates the process.
      warn "Oops, raised an error: #{e.inspect}\n\t#{e.backtrace.join("\n\t")}"
      publish_log(msg.message_id, {kind: 'error', message_id: msg.message_id, error: e.class.inspect})
      raise
    end
    puts "====="
  end
end.abort_on_exception = true

# Foreground: heartbeat the current track to S3 once a minute until
# interrupted by a signal.
loop do
  begin
    update_current_track
    sleep 60
  rescue Interrupt, SignalException
    break
  rescue Exception => e
    warn "oops: #{e.inspect}"
    sleep 60
  end
end
209 |
--------------------------------------------------------------------------------
/claude/CLAUDE.md:
--------------------------------------------------------------------------------
1 | # Claude Code Personal Preferences
2 |
3 | This file contains my personal preferences for Claude Code.
4 |
5 | ## General Instructions
6 |
7 | - Follow existing code conventions and patterns in each project
8 | - Prefer editing existing files over creating new ones
- When writing a throwaway script, prefer Ruby (except when the human requests otherwise or the project has another preference) and use bundler/inline for its dependencies
10 |
11 | ## Code Quality Standards
12 |
13 | - Do not leave empty lines containing only whitespace
14 | - Write clean, readable code that follows language conventions
15 | - Use consistent indentation and formatting
16 |
17 | ### Error handling
18 |
19 | - Avoid blanket exception handling (e.g. `rescue Exception`,`rescue StandardError`, `rescue` in Ruby) unless absolutely necessary
20 | - Error logging can be handled by runtime or frameworks
21 | - Prefer letting exceptions propagate up the call stack, unless you have a specific reason to catch them, such as:
22 | - Add context to the error
23 | - Perform cleanup operations
24 | - Convert one exception type to another with additional information
25 | - Recover from expected error conditions
26 | - Only catch specific exceptions when you have a meaningful way to handle them
27 |
28 | ### Code Comments
29 |
30 | Comments should not repeat what the code is saying. Use comments for explaining **why** something is being done, or to provide context that is not obvious from the code itself.
31 |
32 | **When to Comment:**
33 |
34 | - To explain why a particular approach or workaround was chosen
35 | - To clarify intent when the code could be misread or misunderstood
36 | - To provide context from external systems, specs, or requirements
37 | - To document assumptions, edge cases, or limitations
38 |
39 | **When Not to Comment:**
40 |
41 | - Don't narrate what the code is doing. The code already says that
42 | - Don't duplicate function or variable names in plain English
43 | - Don't leave stale comments that contradict the code
44 | - Don't reference removed or obsolete code paths (e.g. "No longer uses X format")
45 |
46 | ## Language-Specific Style Guides
47 |
48 | ### Ruby
49 |
50 | #### General Conventions
51 |
52 | - Explicit requires at top of file
53 | - Use keyword arguments for methods with multiple parameters
54 | - Prefer `attr_reader` over instance variable access
55 | - Omit hash or keyword argument value when it is identical to key; `{foo:}` instead of `{foo: foo}`
56 |
57 | #### Module and Class Structure
58 |
59 | - Especially in AWS Lambda environment, initialize with dependency injection via `environment:` parameter
60 |
61 | #### Method Definitions
62 |
63 | - Use `def self.method_name` for class methods
64 | - Short single-line methods when appropriate
65 | - Use guard clauses for early returns
66 |
67 | #### Error Handling
68 |
69 | - Rescue specific errors (e.g., `Aws::S3::Errors::NoSuchKey`)
70 | - Raise with descriptive messages
71 |
72 | #### AWS SDK Usage
73 |
74 | - Lazy initialize AWS clients as instance variables
75 | - Pass logger to AWS clients
76 | - Use symbolized keys for AWS responses
77 |
78 | #### Constants
79 |
80 | - Use SCREAMING_SNAKE_CASE for constants
81 |
82 | #### Data handling
83 |
84 | - Use `fetch` for required hash keys
85 | - Use Hash#fetch or Array#fetch when appropriate, especially when the key or index is expected to exist.
86 | - Consistent hash syntax with colons
87 | - Use Struct or Data classes when creating structs instead of raw Hashes.
88 |
89 | #### Logging
90 |
91 | - Use structured logging with JSON when appropriate
92 | - Log important operations (locks, state changes)
93 |
94 | ### Terraform
95 |
96 |
97 | #### General Conventions
98 |
99 | - Prefer data sources over hardcoded values (e.g., `data.aws_region.current`, `data.aws_caller_identity.current`)
100 | - Use locals for computed values and merging configuration
101 | - Use `jsonencode` to compose JSON objects, instead of raw string literal.
102 | - Always include common data sources in aws.tf: `data.aws_region.current`, `data.aws_caller_identity.current`, `data.aws_default_tags.current`
103 |
104 | #### Resource Naming
105 |
- Use snake_case for all resource names and variables, except the following:
  - The resource name in Terraform should match the actual resource name in the provider, e.g. when there is an attribute like `name = "FooBar"`, use `resource "..." "FooBar"`.
108 | - IAM role and policy names use PascalCase (e.g., `Ec2Bastion`, `EcsApp`)
109 | - Use hyphenated lowercase for resource identifiers (e.g., `ec2-default`, `dns-cache`)
110 |
111 | #### File Organization
112 |
113 | **Standard File Structure**: Each Terraform directory should follow this pattern:
114 | - `aws.tf` - AWS provider configuration with standard data sources
115 | - `backend.tf` - S3 backend configuration
116 | - `versions.tf` - Provider version constraints
117 | - Resource-specific files: `vpc.tf`, `sg.tf`, `route53.tf`, `iam.tf`, etc.
118 | - Multiple IAM files: `iam_lambda.tf`, `iam_states.tf`, `iam_ec2_default.tf`
119 | - `outputs.tf` - Output definitions (when needed)
120 | - `locals.tf` - Local values (when needed)
121 |
122 | ##### Variable Definitions
123 |
124 | - Always specify `type` for variables
125 | - Use `default = {}` for optional map variables
126 | - Group related variables together with blank lines
127 |
128 | ##### Resource Arguments
129 |
130 | - Multi-line arguments should be consistently formatted
131 | - Use trailing commas in lists
132 | - Align equals signs for readability in blocks
133 |
134 |
135 | #### AWS specific instructions
136 |
137 | - Use `data.aws_iam_policy_document` whenever possible, instead of jsonencode.
138 | - Use AWS managed policies via `data.aws_iam_policy` when available
139 |
140 | **Tags and Metadata**
141 |
142 | - Use default_tags at provider level for Project and Component tags
143 | - Include meaningful resource-specific tags when needed
144 | - Use descriptive comments for non-obvious configurations
145 |
146 | #### Resource and data source specific instruction
147 |
148 | **aws_iam_role**
149 | - IAM role trust policies use separate `data.aws_iam_policy_document` with `-trust` suffix
150 | - IAM policies split into multiple documents when they get large
151 | - Use specific resource ARNs in IAM policies, avoid wildcards where possible
152 | - Role names use PascalCase (e.g., `NetKea`, `NwEc2Default`)
153 | - Include descriptive `description` field referencing the Terraform path
154 |
155 | **aws_iam_instance_profile**
156 | - Name should match the associated IAM role name
157 | - Use `aws_iam_role.Role.name` for both name and role attributes
158 |
159 | ### Git Commit
160 |
161 | Follow these guidelines if there is no project-specific commit message convention.
162 |
163 | #### First Line (Summary)
164 | - Try to keep total length under 50 characters
165 | - Start with `{component_name}: ` prefix when possible
166 | - Component name can be shortened filename or directory name
167 | - Omit prefix if it would make the line too long
168 | - Use imperative mood (e.g., "Add feature" not "Added feature")
169 | - Use more contextful verbs than "Change", "Add", "Fix" or "Update"
170 | - Try to explain the "why" of the change, not just the "what"
171 |
172 | #### Additional Lines
173 | - Leave second line empty
174 | - Add detailed explanation, background, or reasoning in subsequent lines
175 | - Include relevant context that helps reviewers understand the change
176 |
177 | ## File Management
178 |
179 | - Never create files unless absolutely necessary
180 | - Always prefer editing existing files to creating new ones
181 | - Do not proactively create documentation files (*.md, README) unless explicitly requested
182 | - When working with a temporary file, temporary script, or temporary output, create them in `tmp/` directory under the repository root. No need to delete.
183 |
184 | ## Git GPG Signing
185 |
186 | - When encountered git commit error due to 'gpg: signing failed: Inappropriate ioctl for device', ask human to unlock their signing key instead of skipping signature.
187 |
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env zsh
# Determine target platform: explicit first argument wins; otherwise detect
# Arch Linux (pacman.conf present), then generic Linux, then macOS.
arch=$1
if [[ -z $arch && -e /etc/pacman.conf ]]; then
  arch=arch
fi
if [[ -z $arch && "_$(uname)" = "_Linux" ]]; then
  arch=linux
fi
if [[ -z $arch && "_$(uname)" = "_Darwin" ]]; then
  arch=mac
fi

# Homebrew is a hard prerequisite on macOS; bail out early if missing.
if [ "_$arch" = "_mac" ]; then
  if ! which brew; then
    echo "Install homebrew first" 1>&2
    exit 1
  fi
fi
19 |
set -x
# `shopt` is a bash builtin and fails under zsh (this script's interpreter);
# `setopt null_glob` is the zsh equivalent of bash's nullglob, making
# unmatched globs (e.g. systemd/user/*) expand to nothing instead of erroring.
setopt null_glob
22 |
23 |
# Link editor, shell, terminal, and misc dotfiles from this repo into $HOME.
ln -sfn `pwd`/vim/dot.vim ~/.vim
ln -s `pwd`/vim/dot.vim ~/.local/share/nvim/site
ln -sf `pwd`/vim/dot.vimrc ~/.vimrc
mkdir -p ~/.config/nvim
ln -sf `pwd`/vim/dot.vimrc ~/.config/nvim/init.vim
ln -sf `pwd`/vim/coc-settings.json ~/.config/nvim/coc-settings.json
ln -sf `pwd`/zsh/dot.zshrc ~/.zshrc
ln -sf `pwd`/zsh/${arch}.zshrc_global_env ~/.zshrc_global_env
ln -sf `pwd`/tmux/tmux.conf ~/.tmux.conf
ln -sf `pwd`/misc/dot.irbrc ~/.irbrc
ln -sf `pwd`/misc/dot.gemrc ~/.gemrc

mkdir -p ~/.config/wezterm; ln -sf `pwd`/wezterm.lua ~/.config/wezterm/wezterm.lua

mkdir -p ~/.local/share/applications
ln -s $(pwd)/dot.local/share/applications/sorah-browser.desktop ~/.local/share/applications/

# tmux reattacher shim: simply exec the given command.
cat <<'EOF' > ~/.tmux.reattacher
#!/bin/sh
exec $*
EOF
chmod +x ~/.tmux.reattacher
#mkdir -p ~/git/ruby/foo/{bin,lib}

git config --global ghq.root $HOME/git

# Include the repo's gitconfig exactly once (guard against duplicates).
if ! git config --global --get-regexp include.path '^~/git/config/misc/dot.gitconfig$' >/dev/null; then
  git config --global --add include.path '~/git/config/misc/dot.gitconfig'
fi
53 |
# Bootstrap mise (tool/runtime manager) and install baseline global tools.
if [[ ! -e $HOME/.local/bin/mise ]]; then
  curl https://mise.run | bash
  eval "$($HOME/.local/bin/mise activate zsh)"
fi
mise settings paranoid=1

mise use --global terraform@latest
mise use --global 1password@latest
mise use --global aqua:astral-sh/rye
mise use --global aqua:astral-sh/uv
mise use --global node@lts

# Rust toolchain via rustup, plus zsh completions under ~/.zfunc.
if [[ ! -e $HOME/.rustup ]]; then
  curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
fi
mkdir -p ~/.zfunc
rustup completions zsh > ~/.zfunc/_rustup
71 |
# macOS-specific setup: input-device configs, Dock tweaks, and CLI tools
# installed via brew or mise only when not already present.
if [ "_$arch" = "_mac" ]; then
  mkdir -p `pwd`/mac/dot.config/karabiner
  ln -sf `pwd`/mac/dot.config/karabiner/karabiner.json ~/.config/karabiner/karabiner.json
  mkdir -p `pwd`/mac/dot.config/linearmouse
  ln -sf `pwd`/mac/dot.config/linearmouse/linearmouse.json ~/.config/linearmouse/linearmouse.json

  defaults write com.apple.dock workspaces-auto-swoosh -bool YES
  defaults write com.apple.dock autohide-time-modifier -float 0.5
  killall Dock

  mise use --global github-cli@latest
  mise use --global python@latest
  if ! which pipx 2>/dev/null; then
    pip install --user pipx
  fi

  if ! which gsed 2>/dev/null; then
    brew install gnu-sed
  fi

  if ! which jq 2>/dev/null; then
    mise use --global aqua:jqlang/jq
  fi

  if ! which go 2>/dev/null; then
    mise use --global core:go
  fi

  if ! which tmux 2>/dev/null; then
    brew install tmux
  fi

  if ! which gpg 2>/dev/null; then
    brew install gnupg2
    brew install pinentry-mac
  fi

  if ! which fzf 2>/dev/null; then
    mise use --global aqua:junegunn/fzf@latest
  fi

  if ! which rg 2>/dev/null; then
    mise use --global aqua:BurntSushi/ripgrep
  fi

  if ! which ghq 2>/dev/null; then
    mise use --global aqua:x-motemen/ghq
  fi

  if ! which protoc 2>/dev/null; then
    mise use --global aqua:protocolbuffers/protobuf/protoc
  fi

  if ! which neovim 2>/dev/null; then
    mise use --global aqua:neovim/neovim
  fi

  if ! which cloudflared 2>/dev/null; then
    mise use --global aqua:cloudflare/cloudflared
  fi
fi
133 |
# (prioritize python installed above in macOS)
# Cloud CLIs and pinact (GitHub Actions pinner) via mise, for all platforms.
mise use --global aws-cli@latest
mise use --global gcloud@latest

mise use --global aqua:suzuki-shunsuke/pinact
139 |
# Arch Linux-specific setup: trust and enable the personal aur-sorah pacman
# repository once, then install system packages and AUR packages.
if [[ "_$arch" = "_arch" ]]; then
  if ! grep -q aur-sorah /etc/pacman.conf; then
    curl -Ssf https://sorah.jp/packaging/arch/17C611F16D92677398E0ADF51AD43CA09D82C624.txt | sudo pacman-key -a -
    sudo pacman-key --lsign-key 17C611F16D92677398E0ADF51AD43CA09D82C624
    sudo tee -a /etc/pacman.conf <<-'EOF'
	[aur-sorah]
	SigLevel = Required
	Server = https://arch.sorah.jp/$repo/os/$arch
	EOF
  fi

  mise use --global asdf:mise-plugins/mise-yay

  # https://unix.stackexchange.com/questions/274727/how-to-force-pacman-to-answer-yes-to-all-questions/584001#584001
  sudo pacman --needed --noconfirm --ask 54 -Syy \
    base-devel \
    gnupg pinentry \
    jq \
    screen tmux zsh \
    neovim \
    git \
    strace \
    git mercurial subversion \
    go go-tools \
    whois ipcalc iperf mtr nmap netcat tcpdump traceroute bind-tools wireguard-tools ethtool ldns \
    inetutils \
    ebtables nftables \
    swaks \
    bridge-utils \
    curl \
    pv \
    smartmontools usbutils \
    cryptsetup btrfs-progs dosfstools lvm2 xfsprogs \
    e2fsprogs \
    dool htop iotop lsof \
    parallel \
    imagemagick \
    ruby ruby-irb ruby-erb \
    nodejs \
    python-pip \
    python-pipx \
    keychain \
    fzf \
    ripgrep \
    ghq \
    github-cli \
    protobuf \
    patatt \
    mold \
    file findutils grep lsof \
    zip \
    cmake \
    openssl cfssl \
    cosign \
    man-db man-pages texinfo \
    postgresql-libs mariadb-clients \
    rbenv \
    docker-buildx \
    amazon-ecr-credential-helper
  yay -Sy bazelisk-bin cloudflared-bin \
    perl-file-rename \
    aws-session-manager-plugin \
    pristine-tar \
    terraform-ls \
    debianutils \
    devscripts \
    git-buildpackage \
    tio \
    envchain \
    overmind \
    jsonnet-language-server-bin

  if [[ ! -e ~/.rbenv/plugins/ruby-build ]]; then
    mkdir -p ~/.rbenv/plugins
    git clone https://github.com/rbenv/ruby-build ~/.rbenv/plugins/ruby-build
  fi
fi
217 |
# Language-ecosystem tools via mise backends (pipx/npm).
mise use --global pipx:aws-sam-cli
mise use --global npm:@anthropic-ai/claude-code@latest
mise use --global npm:@google/gemini-cli@latest
mise use --global npm:@playwright/mcp@latest
mise use --global npm:difit@latest

# sqldef ships one archive per binary; register an alias per tool first.
for x in mysqldef psqldef sqlite3def; do
  mise alias set "${x}" "ubi:sqldef/sqldef[exe=${x},matching=${x}]"
  mise use --global "${x}"
done

mise alias set smithy "github:smithy-lang/smithy[bin_path=bin,bin=smithy,strip_components=1]"
mise alias set smithy-language-server "github:smithy-lang/smithy-language-server[bin_path=bin,bin=smithy-language-server]"
mise use --global smithy
mise use --global smithy-language-server
233 |
# Go workspace: ~/.gopath with src symlinked to the ghq checkout root.
if which go 2>/dev/null >/dev/null; then
  [ ! -d ~/.gopath ] && mkdir ~/.gopath
  [ ! -d ~/.gopath/src ] && ln -s ../git ~/.gopath/src

  export GOPATH=$HOME/.gopath

  if ! which gopls; then
    go install golang.org/x/tools/gopls@latest
  fi
fi

# Register MCP servers with Claude Code (idempotent: get-or-add).
if which claude 2>/dev/null >/dev/null; then
  claude mcp get playwright || claude mcp add -s user playwright mcp-server-playwright
  claude mcp get aws-knowledge-mcp-server || claude mcp add -s user aws-knowledge-mcp-server -t http https://knowledge-mcp.global.api.aws
fi

# Install user-level systemd units from the repo and reload the user manager.
if systemctl --version 2>/dev/null >/dev/null; then
  mkdir -p $HOME/.config/systemd/user
  for x in `pwd`/systemd/user/*; do
    cp -v "${x}" ~/.config/systemd/user/
  done
  systemctl --user daemon-reload
fi
257 |
--------------------------------------------------------------------------------
/linux/x/dot.config/dunst/dunstrc:
--------------------------------------------------------------------------------
1 | [global]
2 | font = OpenSans 12
3 |
4 | # Allow a small subset of html markup:
5 | # bold
6 | # italic
7 | # strikethrough
8 | # underline
9 | #
10 | # For a complete reference see
11 | # .
12 | # If markup is not allowed, those tags will be stripped out of the
13 | # message.
14 | allow_markup = yes
15 |
16 | # The format of the message. Possible variables are:
17 | # %a appname
18 | # %s summary
19 | # %b body
20 | # %i iconname (including its path)
21 | # %I iconname (without its path)
22 | # %p progress value if set ([ 0%] to [100%]) or nothing
23 | # Markup is allowed
24 | format = "%s\n%b"
25 |
26 | # Sort messages by urgency.
27 | sort = yes
28 |
29 | # Show how many messages are currently hidden (because of geometry).
30 | indicate_hidden = yes
31 |
32 | # Alignment of message text.
33 | # Possible values are "left", "center" and "right".
34 | alignment = left
35 |
    # The frequency with which text that is longer than the notification
    # window allows bounces back and forth.
38 | # This option conflicts with "word_wrap".
39 | # Set to 0 to disable.
40 | bounce_freq = 0
41 |
42 | # Show age of message if message is older than show_age_threshold
43 | # seconds.
44 | # Set to -1 to disable.
45 | show_age_threshold = 60
46 |
47 | # Split notifications into multiple lines if they don't fit into
48 | # geometry.
49 | word_wrap = yes
50 |
51 | # Ignore newlines '\n' in notifications.
52 | ignore_newline = no
53 |
54 |
55 | # The geometry of the window:
56 | # [{width}]x{height}[+/-{x}+/-{y}]
57 | # The geometry of the message window.
58 | # The height is measured in number of notifications everything else
59 | # in pixels. If the width is omitted but the height is given
60 | # ("-geometry x2"), the message window expands over the whole screen
61 | # (dmenu-like). If width is 0, the window expands to the longest
62 | # message displayed. A positive x is measured from the left, a
    # negative from the right side of the screen. Y is measured from
    # the top and down respectively.
65 | # The width can be negative. In this case the actual width is the
66 | # screen width minus the width defined in within the geometry option.
67 | geometry = "300x5-30+20"
68 |
69 | # Shrink window if it's smaller than the width. Will be ignored if
70 | # width is 0.
71 | shrink = no
72 |
73 | # The transparency of the window. Range: [0; 100].
74 | # This option will only work if a compositing windowmanager is
75 | # present (e.g. xcompmgr, compiz, etc.).
76 | transparency = 0
77 |
78 | # Don't remove messages, if the user is idle (no mouse or keyboard input)
79 | # for longer than idle_threshold seconds.
80 | # Set to 0 to disable.
81 | idle_threshold = 120
82 |
83 | # Which monitor should the notifications be displayed on.
84 | monitor = 0
85 |
86 | # Display notification on focused monitor. Possible modes are:
87 | # mouse: follow mouse pointer
88 | # keyboard: follow window with keyboard focus
89 | # none: don't follow anything
90 | #
91 | # "keyboard" needs a windowmanager that exports the
92 | # _NET_ACTIVE_WINDOW property.
93 | # This should be the case for almost all modern windowmanagers.
94 | #
95 | # If this option is set to mouse or keyboard, the monitor option
96 | # will be ignored.
97 | follow = mouse
98 |
99 | # Should a notification popped up from history be sticky or timeout
100 | # as if it would normally do.
101 | sticky_history = yes
102 |
103 | # Maximum amount of notifications kept in history
104 | history_length = 20
105 |
106 | # Display indicators for URLs (U) and actions (A).
107 | show_indicators = yes
108 |
109 | # The height of a single line. If the height is smaller than the
110 | # font height, it will get raised to the font height.
111 | # This adds empty space above and under the text.
112 | line_height = 0
113 |
    # Draw a line of "separator_height" pixel height between two
115 | # notifications.
116 | # Set to 0 to disable.
117 | separator_height = 2
118 |
119 | # Padding between text and separator.
120 | padding = 8
121 |
122 | # Horizontal padding.
123 | horizontal_padding = 8
124 |
125 | # Define a color for the separator.
126 | # possible values are:
127 | # * auto: dunst tries to find a color fitting to the background;
128 | # * foreground: use the same color as the foreground;
129 | # * frame: use the same color as the frame;
130 | # * anything else will be interpreted as a X color.
131 | separator_color = frame
132 |
133 | # Print a notification on startup.
134 | # This is mainly for error detection, since dbus (re-)starts dunst
135 | # automatically after a crash.
136 | startup_notification = false
137 |
138 | # dmenu path.
139 | dmenu = /usr/bin/dmenu -p dunst:
140 |
141 | # Browser for opening urls in context menu.
142 | browser = /usr/bin/firefox -new-tab
143 |
144 | # Align icons left/right/off
145 | icon_position = off
146 |
147 | # Paths to default icons.
148 | icon_folders = /usr/share/icons/gnome/16x16/status/:/usr/share/icons/gnome/16x16/devices/
149 |
150 | [frame]
151 | width = 3
152 | color = "#aaaaaa"
153 |
154 | [shortcuts]
155 |
156 | # Shortcuts are specified as [modifier+][modifier+]...key
157 | # Available modifiers are "ctrl", "mod1" (the alt-key), "mod2",
158 | # "mod3" and "mod4" (windows-key).
159 | # Xev might be helpful to find names for keys.
160 |
161 | # Close notification.
162 | close = ctrl+space
163 |
164 | # Close all notifications.
165 | close_all = ctrl+shift+space
166 |
167 | # Redisplay last message(s).
168 | # On the US keyboard layout "grave" is normally above TAB and left
169 | # of "1".
170 | history = ctrl+grave
171 |
172 | # Context menu.
173 | context = ctrl+shift+period
174 |
175 | [urgency_low]
176 | # IMPORTANT: colors have to be defined in quotation marks.
177 | # Otherwise the "#" and following would be interpreted as a comment.
178 | background = "#222222"
179 | foreground = "#888888"
180 | timeout = 10
181 |
182 | [urgency_normal]
183 | background = "#285577"
184 | foreground = "#ffffff"
185 | timeout = 10
186 |
187 | [urgency_critical]
188 | background = "#900000"
189 | foreground = "#ffffff"
190 | timeout = 0
191 |
192 |
193 | # Every section that isn't one of the above is interpreted as a rules to
194 | # override settings for certain messages.
195 | # Messages can be matched by "appname", "summary", "body", "icon", "category",
196 | # "msg_urgency" and you can override the "timeout", "urgency", "foreground",
197 | # "background", "new_icon" and "format".
198 | # Shell-like globbing will get expanded.
199 | #
200 | # SCRIPTING
201 | # You can specify a script that gets run when the rule matches by
202 | # setting the "script" option.
203 | # The script will be called as follows:
204 | # script appname summary body icon urgency
205 | # where urgency can be "LOW", "NORMAL" or "CRITICAL".
206 | #
207 | # NOTE: if you don't want a notification to be displayed, set the format
208 | # to "".
209 | # NOTE: It might be helpful to run dunst -print in a terminal in order
210 | # to find fitting options for rules.
211 |
212 | #[espeak]
213 | # summary = "*"
214 | # script = dunst_espeak.sh
215 |
216 | #[script-test]
217 | # summary = "*script*"
218 | # script = dunst_test.sh
219 |
220 | #[ignore]
221 | # # This notification will not be displayed
222 | # summary = "foobar"
223 | # format = ""
224 |
225 | #[signed_on]
226 | # appname = Pidgin
227 | # summary = "*signed on*"
228 | # urgency = low
229 | #
230 | #[signed_off]
231 | # appname = Pidgin
232 | # summary = *signed off*
233 | # urgency = low
234 | #
235 | #[says]
236 | # appname = Pidgin
237 | # summary = *says*
238 | # urgency = critical
239 | #
240 | #[twitter]
241 | # appname = Pidgin
242 | # summary = *twitter.com*
243 | # urgency = normal
244 | #
245 | # vim: ft=cfg
246 |
--------------------------------------------------------------------------------
/bin/sorah-aws-ec2-run-instance:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require 'optparse'
3 | require 'open-uri'
4 | require 'tempfile'
5 | require 'aws-sdk-s3'
6 | require 'aws-sdk-ec2'
7 | require 'json'
8 | require 'yaml'
9 | require 'logger'
10 |
# Log all AWS API calls to stderr for visibility.
Aws.config[:logger] = Logger.new($stderr)

# On-disk cache of discovered VPC/subnet metadata (see vpcs below).
VPC_CACHE = File.expand_path '~/tmp/sorah-vpc.cachev1.yml'
#AMI_CACHE = File.expand_path '~/tmp/sorah-ami.cachev1.json'

# nil means "decide later": edit interactively when required options are missing.
edit = nil

# Defaults for the run-instances request; may be edited interactively in vi.
options = {
  names: [],
  instance_type: 't2.micro',
  vpc: nil,
  subnets: [],
  security_groups: %w(default),
  image_id: nil,
  key_name: 'sorah',
  public_ip: nil,
  volumes: { '/dev/sda1' => { device_name: '/dev/sda1', ebs: {volume_size: 15, volume_type: 'gp2'} } },
  role: nil,
  ebs_optimized: false,
  wait: false,
}
32 |
# CLI flags; handlers mutate the `options` hash defined above.
parser = OptionParser.new do |_|
  _.on('--no-edit') { edit = false }
  _.on('-e', '--edit') { edit = true }

  _.on('-w', '--wait') { options[:wait] = true }

  _.on('-n NAME', '--name NAME') { |n| options[:names] << n }

  _.on('-t TYPE', '--type TYPE') { |t| options[:instance_type] = t }

  _.on('-v VPC', '--vpc VPC') { |v| options[:vpc] = v }
  _.on('-s SUBNET', '--subnet SUBNET') { |s| options[:subnets] << s }

  # Was `security_groups.delete('default')`, which raised NameError when the
  # flag was used: the list lives in options[:security_groups].
  _.on('--no-default-sg') { options[:security_groups].delete('default') }
  _.on('-g SECURITYGROUP', '--sg SECURITYGROUP') { |g| options[:security_groups] << g }

  # The long form previously lacked its argument placeholder ('--image'), so
  # the option did not accept a value in long form.
  _.on('-i IMAGE', '--image IMAGE') { |i| options[:image_id] = i }

  _.on('-k KEY', '--key KEY') { |k| options[:key_name] = k }

  _.on('-p', '--public-ip') { options[:public_ip] = true }
  _.on('-P', '--no-public-ip') { options[:public_ip] = false }

  # --ebs dev=/dev/xvdb,type=gp2,size=100[,iops=N][,snapshot=snap-...]
  _.on('--ebs VOL', '-V VOL') { |v|
    param = v.split(?,).map { |kv| k,v=kv.split(?=,2); [k.to_sym, v] }.to_h
    spec = {
      device_name: param[:device] || param[:dev],
      ebs: {
        volume_type: param[:type] || 'standard',
        volume_size: (param[:size] || 15).to_i,
      }
    }
    spec[:ebs][:iops] = param[:iops] if param.key?(:iops)
    # Copy-paste bug fixed: snapshot_id was gated on param.key?(:iops).
    spec[:ebs][:snapshot_id] = param[:snapshot] if param.key?(:snapshot)
    options[:volumes][spec[:device_name]] = spec
  }

  _.on('-r ROLE', '--role ROLE') { |r| options[:role] = r }

  _.on('--ebs-optimized') { options[:ebs_optimized] = true }

  _.on('-T TAG', '--tag TAG') { |ts| k,v = ts.split(/[=:]/,2); (options[:tags] ||= {})[k] = v }
end
76 |
args = parser.parse(ARGV)

#######

# list_buckets is a cheap call whose owner id identifies the AWS account;
# it keys the VPC cache so multiple accounts don't collide.
@account_canonical_id = Aws::S3::Client.new(region: 'ap-northeast-1').list_buckets.owner.id
@vpc_cache = File.exist?(VPC_CACHE) ? YAML.load(File.read(VPC_CACHE)) : {}

# Persist the cache at exit, but only when it was (re)populated this run.
@vpc_cache_touched = false
at_exit {
  if @vpc_cache_touched
    File.write VPC_CACHE, YAML.dump(@vpc_cache)
  end
}
90 |
# All EC2 region names visible to this account, memoized for the process.
def regions
  @regions ||= begin
    bootstrap_region = ENV['AWS_DEFAULT_REGION'] || 'ap-northeast-1'
    client = Aws::EC2::Client.new(region: bootstrap_region)
    client.describe_regions.regions.map(&:region_name)
  end
end
94 |
# Map of VPC name => {id:, subnets:, region:} across all regions, fetched in
# parallel (one thread per region, one per VPC) and cached per account in
# @vpc_cache. Only VPCs/subnets carrying a Name tag are included.
def vpcs(refresh: false)
  @vpc_cache[@account_canonical_id] = nil if refresh
  @vpc_cache_touched = true
  @vpc_cache[@account_canonical_id] ||= begin
    regions.map do |region|
      Thread.new do
        ec2 = Aws::EC2::Resource.new(region: region)
        ec2.vpcs.map do |vpc|
          Thread.new do
            vpc_name_tag = vpc.tags.find { |_| _.key == 'Name' }
            next unless vpc_name_tag
            subnets = vpc.subnets.map do |subnet|
              subnet_name_tag = subnet.tags.find { |_| _.key == 'Name' }
              next unless subnet_name_tag
              # NOTE(review): `p` looks like leftover debug output, but it also
              # returns its argument and thus supplies the [name, attrs] pair
              # that .to_h consumes — removing it must keep the array expression.
              p [subnet_name_tag.value, id: subnet.subnet_id, cidr: subnet.cidr_block]
            end.compact.to_h
            [vpc_name_tag.value, id: vpc.vpc_id, subnets: subnets, region: region]
          end
        end.map(&:value).compact
      end
    end.flat_map(&:value).to_h
  end
end
118 |
119 |
120 | #######
121 |
# When the caller did not explicitly force the editor on or off, open it
# whenever a required option (names / image_id / vpc / subnets) is still
# missing.
if edit.nil?
edit = options[:names].empty? || options[:image_id].nil? || options[:vpc].nil? || options[:subnets].empty?
end

# Round-trip the options hash through `vi` as YAML so it can be edited
# interactively (bare vi: no vimrc, no plugins).
if edit
options = YAML.load(Tempfile.create("runinstance") { |f| f.puts options.to_yaml; f.flush; system(*%w(vi -N -u NORC --noplugin), f.path); f.rewind; f.read })
end

# Resolve the chosen VPC (looked up by its Name tag via vpcs) to its
# region and VPC id.
region = vpcs[options[:vpc]][:region]
ec2 = Aws::EC2::Resource.new(region: region)
vpc = ec2.vpc(vpcs[options[:vpc]][:id])

# Resolve options[:image_id] to an AMI. Accepts a literal ami-* id, a
# handful of named shortcuts (newest matching image by creation_date),
# or ubuntu-<release>, which is looked up from Ubuntu's published
# cloud-image index for this region (hvm / ebs-ssd / amd64).
image = case options[:image_id]
when /\Aami-/
ec2.image(options[:image_id])
when 'gentoo-build'
ec2.images(owners: %w(self), filters: [name: 'name', values: %w(gentoo-2*)]).sort_by {|_| Time.parse(_.creation_date) }.last
when 'gentoo'
ec2.images(owners: %w(self), filters: [name: 'name', values: %w(nkmi-base-gentoo-2*)]).sort_by {|_| Time.parse(_.creation_date) }.last
when 'arch', nil
# NOTE(review): 093273469852 looks like an external AMI publisher
# account for Arch Linux images — confirm it is still trusted.
ec2.images(owners: %w(093273469852), filters: [name: 'name', values: %w(arch-linux-hvm-*-ebs)]).sort_by {|_| Time.parse(_.creation_date) }.last
when 'nkmi-arch'
ec2.images(owners: %w(self), filters: [name: 'name', values: %w(nkmi-base-arch-2*)]).sort_by {|_| Time.parse(_.creation_date) }.last
when 'isuxf-ci'
ec2.images(owners: %w(self), filters: [name: 'name', values: %w(isucon10f-amd64-ci-*)]).sort_by {|_| Time.parse(_.creation_date) }.last
when /\Aubuntu-(.+)/
# released.current.txt is a tab-separated table; parse lazily and take
# the first row matching this release/region/virtualization/arch.
image_id = URI.open("https://cloud-images.ubuntu.com/query/#{$1}/server/released.current.txt",&:read).each_line.lazy.map { |_|
name,_,_,release,type,arch,iregion,image_id,_,_,vt = _.chomp.split(?\t)
{name: name, release: release, type: type, arch: arch, region: iregion, image_id: image_id, virtualization_type: vt}
}.find { |release|
release[:name] == $1 && release[:region] == region && release[:virtualization_type] == 'hvm' && release[:type] == 'ebs-ssd' && release[:arch] == 'amd64'
}[:image_id]
ec2.image(image_id)
#when /\A{/
# JSON.parse(options[:image_id])
end
158 |
# EBS block-device mappings and security-group names resolved to ids
# within the chosen VPC.
block_device_mappings = options[:volumes].values
security_groups = vpc.security_groups(filters: [name: 'group-name', values: options[:security_groups]]).map(&:group_id).uniq

# Launch one instance per requested name, distributing instances across
# the requested subnets round-robin. Each RunInstances call happens in
# its own thread; the thread's value is the created Instance list.
instances = options[:names].flat_map.with_index do |name, index|
subnet_name = options[:subnets][index % options[:subnets].size]
subnet = vpcs[options[:vpc]][:subnets][subnet_name]

run_instance_options = {
min_count: 1,
max_count: 1,
image_id: image.image_id,
key_name: options[:key_name],
instance_type: options[:instance_type],
block_device_mappings: block_device_mappings,
disable_api_termination: options[:disable_api_termination],
network_interfaces: [
{
device_index: 0,
subnet_id: subnet[:id],
groups: security_groups,
delete_on_termination: true,
associate_public_ip_address: options[:public_ip],
},
],
iam_instance_profile: {
name: options[:role],
},
ebs_optimized: options[:ebs_optimized],
}

# nil public_ip means "use the subnet default": omit the key entirely
# rather than sending an explicit nil to the API.
if options[:public_ip].nil?
run_instance_options[:network_interfaces].each do |nic|
nic.delete :associate_public_ip_address
end
end

# Name tag plus any user-supplied tags, converted to the API's
# key/value hash form.
tags = {'Name' => name}.merge(options[:tags] || {}).map do |k, v|
{key: k.to_s, value: v.to_s}
end

Thread.new do
ec2.create_instances(run_instance_options).to_a.tap do |instances|
instances.each do |instance|
instance.create_tags(tags: tags)
end
end
end
end.flat_map(&:value)

# Poll until every instance has a private IP assigned.
# NOTE(review): this loop has no sleep — every reload is an API call, so
# it spins at full speed until the IPs appear; confirm that is intended.
until instances.all? { |_| _.private_ip_address }
instances.map! do |instance|
instance.reload
end
end

# Report each instance to stderr; public DNS/IP only when one was
# assigned.
instances.each do |instance|
pubip_info = if instance.public_dns_name
": #{instance.public_dns_name} (#{instance.public_ip_address})"
end
warn "- #{instance.instance_id} (#{instance.vpc_id} / #{instance.subnet_id} / #{instance.placement.availability_zone})#{pubip_info}"
end

# Optionally block (one waiter thread per instance, up to 120 attempts
# at 20s intervals) until all instances report state "running".
if options[:wait]
instances.map do |instance|
Thread.new do
instance.wait_until(max_attempts:120, delay: 20) { |i| i.state.name == 'running' }
end
end.each(&:join)
end
228 |
229 | ######
230 |
231 |
--------------------------------------------------------------------------------
/third_party/mrkn256.vim:
--------------------------------------------------------------------------------
1 | " Vim color file
2 | " Maintainer: Kenta Murata
3 |
4 | " These are the color theme designed by mrkn based on "desert256" theme
" created by Henry So, Jr. This theme is designed to work with 88-
6 | " and 256-color xterms.
7 | "
8 | " The ancestor version "desert256" theme is available at
9 | " http://www.vim.org/scripts/script.php?script_id=1243
10 | "
11 | " The real feature of this color scheme, with a wink to the "inkpot" theme, is
12 | " the programmatic approximation of the gui colors to the palettes of 88- and
13 | " 256- color xterms. The functions that do this (folded away, for
14 | " readability) are calibrated to the colors used for Thomas E. Dickey's xterm
15 | " (version 200), which is available at http://dickey.his.com/xterm/xterm.html.
16 | "
17 | " Henry had struggled with trying to parse the rgb.txt file to avoid the
18 | " necessity of converting color names to #rrggbb form, but decided it was just
" not worth the effort. I thank him a lot for his results.
20 |
" Standard colorscheme preamble: declare a dark background, reset any
" highlighting from a previously loaded scheme, and register our name.
set background=dark
if version > 580
" no guarantees for version 5.8 and below, but this makes it stop
" complaining
hi clear
if exists("syntax_on")
syntax reset
endif
endif
let g:colors_name="mrkn256"
31 |
32 | if has("gui_running") || &t_Co == 88 || &t_Co == 256
33 | " functions {{{
34 | " returns an approximate grey index for the given grey level
" Approximate a 0-255 grey level with the palette's grey-index (not yet a
" palette slot — see grey_color for that mapping).
function! grey_number(x)
if &t_Co == 88
" 88-color xterm: 10 grey indices; bucket by hand-tuned thresholds.
if a:x < 23
return 0
elseif a:x < 69
return 1
elseif a:x < 103
return 2
elseif a:x < 127
return 3
elseif a:x < 150
return 4
elseif a:x < 173
return 5
elseif a:x < 196
return 6
elseif a:x < 219
return 7
elseif a:x < 243
return 8
else
return 9
endif
else
" 256-color xterm: grey levels sit at 8 + 10*n (see grey_level), so
" round (x - 8) / 10 to the nearest index; very dark values map to 0.
if a:x < 14
return 0
else
let l:n = (a:x - 8) / 10
let l:m = (a:x - 8) % 10
if l:m < 5
return l:n
else
return l:n + 1
endif
endif
endif
endfun
72 |
73 | " returns the actual grey level represented by the grey index
" Inverse of grey_number: the actual 0-255 grey value behind a grey index.
function! grey_level(n)
if &t_Co == 88
" 88-color xterm: lookup table of the terminal's fixed grey ramp.
if a:n == 0
return 0
elseif a:n == 1
return 46
elseif a:n == 2
return 92
elseif a:n == 3
return 115
elseif a:n == 4
return 139
elseif a:n == 5
return 162
elseif a:n == 6
return 185
elseif a:n == 7
return 208
elseif a:n == 8
return 231
else
return 255
endif
else
" 256-color xterm: evenly spaced ramp at 8 + 10*n; index 0 is black.
if a:n == 0
return 0
else
return 8 + (a:n * 10)
endif
endif
endfun
105 |
106 | " returns the palette index for the given grey index
" Map a grey index to its terminal palette slot.
function! grey_color(n)
if &t_Co == 88
" 88-color: index 0 is black (16); 1-8 land on the grey ramp 80-87;
" 9 maps to slot 79.
if a:n == 0
return 16
elseif a:n == 9
return 79
else
return 79 + a:n
endif
else
" 256-color: index 0 is black (16), 25 is white (231); 1-24 land on
" the grey ramp 232-255.
if a:n == 0
return 16
elseif a:n == 25
return 231
else
return 231 + a:n
endif
endif
endfun
126 |
127 | " returns an approximate color index for the given color level
" Approximate a single 0-255 color channel with the color-cube index for
" that channel (4 steps on 88-color terminals, 6 steps on 256-color).
function! rgb_number(x)
if &t_Co == 88
" 88-color: 4-level cube; bucket by hand-tuned thresholds.
if a:x < 69
return 0
elseif a:x < 172
return 1
elseif a:x < 230
return 2
else
return 3
endif
else
" 256-color: cube levels sit at 55 + 40*n (see rgb_level), so round
" (x - 55) / 40 to the nearest index; dark values map to 0.
if a:x < 75
return 0
else
let l:n = (a:x - 55) / 40
let l:m = (a:x - 55) % 40
if l:m < 20
return l:n
else
return l:n + 1
endif
endif
endif
endfun
153 |
154 | " returns the actual color level for the given color index
" Inverse of rgb_number: the actual 0-255 channel value behind a cube index.
function! rgb_level(n)
if &t_Co == 88
" 88-color: lookup table of the terminal's 4 cube levels.
if a:n == 0
return 0
elseif a:n == 1
return 139
elseif a:n == 2
return 205
else
return 255
endif
else
" 256-color: levels sit at 55 + 40*n; index 0 is 0.
if a:n == 0
return 0
else
return 55 + (a:n * 40)
endif
endif
endfun
174 |
175 | " returns the palette index for the given R/G/B color indices
" Map R/G/B color-cube indices to a terminal palette slot. Both palettes
" place the color cube right after the 16 ANSI colors; the 88-color cube
" is 4x4x4 and the 256-color cube is 6x6x6, hence the differing stride.
function! rgb_color(x, y, z)
if &t_Co == 88
let l:stride = 4
else
let l:stride = 6
endif
return 16 + ((a:x * l:stride) + a:y) * l:stride + a:z
endfun
183 |
184 | " returns the palette index to approximate the given R/G/B color levels
" Pick the palette entry closest to the (r, g, b) color: candidates are
" the nearest grey-ramp entry and the nearest color-cube entry.
function! color(r, g, b)
" get the closest grey
let l:gx = grey_number(a:r)
let l:gy = grey_number(a:g)
let l:gz = grey_number(a:b)

" get the closest color
let l:x = rgb_number(a:r)
let l:y = rgb_number(a:g)
let l:z = rgb_number(a:b)

if l:gx == l:gy && l:gy == l:gz
" there are two possibilities: compare the squared-error distance of
" the grey candidate against the cube candidate and keep the closer.
let l:dgr = grey_level(l:gx) - a:r
let l:dgg = grey_level(l:gy) - a:g
let l:dgb = grey_level(l:gz) - a:b
let l:dgrey = (l:dgr * l:dgr) + (l:dgg * l:dgg) + (l:dgb * l:dgb)
let l:dr = rgb_level(l:gx) - a:r
let l:dg = rgb_level(l:gy) - a:g
let l:db = rgb_level(l:gz) - a:b
let l:drgb = (l:dr * l:dr) + (l:dg * l:dg) + (l:db * l:db)
if l:dgrey < l:drgb
" use the grey
return grey_color(l:gx)
else
" use the color
return rgb_color(l:x, l:y, l:z)
endif
else
" only one possibility: the channels round to different grey indices,
" so the color isn't grey — use the cube.
return rgb_color(l:x, l:y, l:z)
endif
endfun
218 |
219 | " returns the palette index to approximate the 'rrggbb' hex string
" Convert an 'rrggbb' hex string to the nearest terminal palette index.
" Each two-character slice is coerced to a number via the "0x" prefix
" trick, then handed to color() for the palette approximation.
function! rgb(rgb)
let l:red = ("0x" . strpart(a:rgb, 0, 2)) + 0
let l:green = ("0x" . strpart(a:rgb, 2, 2)) + 0
let l:blue = ("0x" . strpart(a:rgb, 4, 2)) + 0
return color(l:red, l:green, l:blue)
endfun
227 |
228 | " sets the highlighting for the given group
" Define highlighting for a:group. Empty-string arguments are skipped.
" GUI colors use the hex value verbatim; terminal colors go through
" rgb() to find the closest palette entry. All requested attributes are
" accumulated and applied with a single :hi command.
function! X(group, fg, bg, attr)
let l:args = ""
if a:fg != ""
let l:args = l:args . " guifg=#" . a:fg . " ctermfg=" . rgb(a:fg)
endif
if a:bg != ""
let l:args = l:args . " guibg=#" . a:bg . " ctermbg=" . rgb(a:bg)
endif
if a:attr != ""
let l:args = l:args . " gui=" . a:attr . " cterm=" . a:attr
endif
if l:args != ""
exec "hi " . a:group . l:args
endif
endfun
240 | " }}}
241 |
" Apply the theme: each X(group, "fg", "bg", "attr") call sets both gui
" and cterm colors. Groups listed as bare comments are intentionally
" left at their defaults.
call X("Normal", "cccccc", "000000", "")

" highlight groups
call X("Cursor", "708090", "f0e68c", "")
"CursorIM
call X("CursorColumn", "", "333333", "none")
call X("CursorLine", "", "333333", "underline")
"Directory
"DiffAdd
"DiffChange
"DiffDelete
"DiffText
"ErrorMsg
call X("VertSplit", "666666", "000000", "none")
call X("Folded", "ffd700", "4d4d4d", "")
call X("FoldColumn", "d2b48c", "4d4d4d", "")
call X("IncSearch", "708090", "f0e68c", "")
call X("LineNr", "666666", "", "none")
call X("ModeMsg", "daa520", "", "")
call X("MoreMsg", "2e8b57", "", "")
call X("NonText", "666699", "", "none")
call X("Question", "00ff7f", "", "")
call X("Search", "f5deb3", "cd853f", "")
call X("SpecialKey", "666699", "", "none")
call X("StatusLine", "ffffff", "666666", "none")
call X("StatusLineNC", "000000", "666666", "none")
call X("Title", "cd5c5c", "", "")
call X("Visual", "6b8e23", "f0e68c", "reverse")

"VisualNOS
call X("WarningMsg", "fa8072", "", "")
"WildMenu
"Menu
"Scrollbar
"Tooltip
call X("Pmenu", "cccccc", "333333", "none")
call X("PmenuSel", "663333", "cccccc", "bold")
" call X("PmenuSbar", "", "", "")
" call X("PmenuThumb", "", "", "")

" syntax highlighting groups
call X("Comment", "87ceeb", "", "")
call X("Constant", "ffcc66", "", "")
call X("Identifier", "99ff00", "", "none")
call X("Statement", "6699ff", "", "none")
call X("PreProc", "ff6666", "", "")
call X("Type", "ffcc66", "", "none")
call X("Special", "ffdead", "", "")
"Underlined
call X("Ignore", "666666", "", "")
"Error
call X("Todo", "ff4500", "eeee00", "")

" delete functions {{{
" Helper functions are only needed while the scheme loads; remove them
" so they don't linger in the global function namespace.
delf X
delf rgb
delf color
delf rgb_color
delf rgb_level
delf rgb_number
delf grey_color
delf grey_level
delf grey_number
" }}}
else
" color terminal definitions
" Fallback for terminals without gui/88/256-color support: plain cterm
" attributes using the basic named/numbered colors.
hi SpecialKey ctermfg=darkgreen
hi NonText cterm=bold ctermfg=darkblue
hi Directory ctermfg=darkcyan
hi ErrorMsg cterm=bold ctermfg=7 ctermbg=1
hi IncSearch cterm=NONE ctermfg=yellow ctermbg=green
hi Search cterm=NONE ctermfg=grey ctermbg=blue
hi MoreMsg ctermfg=darkgreen
hi ModeMsg cterm=NONE ctermfg=brown
hi LineNr ctermfg=3
hi Question ctermfg=green
hi StatusLine cterm=bold,reverse
hi StatusLineNC cterm=reverse
hi VertSplit cterm=reverse
hi Title ctermfg=5
hi Visual cterm=reverse
hi VisualNOS cterm=bold,underline
hi WarningMsg ctermfg=1
hi WildMenu ctermfg=0 ctermbg=3
hi Folded ctermfg=darkgrey ctermbg=NONE
hi FoldColumn ctermfg=darkgrey ctermbg=NONE
hi DiffAdd ctermbg=4
hi DiffChange ctermbg=5
hi DiffDelete cterm=bold ctermfg=4 ctermbg=6
hi DiffText cterm=bold ctermbg=1
hi Comment ctermfg=darkcyan
hi Constant ctermfg=brown
hi Special ctermfg=5
hi Identifier ctermfg=6
hi Statement ctermfg=3
hi PreProc ctermfg=5
hi Type ctermfg=2
hi Underlined cterm=underline ctermfg=5
hi Ignore ctermfg=darkgrey
hi Error cterm=bold ctermfg=7 ctermbg=1
endif
343 |
344 | " vim: set fdl=0 fdm=marker:
345 |
--------------------------------------------------------------------------------
|