├── .gitignore ├── src ├── pipewire │ ├── mod.rs │ ├── schema │ │ ├── node.rs │ │ ├── device.rs │ │ └── mod.rs │ └── sink.rs ├── table.rs ├── i3status.rs ├── notify.rs ├── logging.rs ├── lib.rs ├── process.rs ├── ip_addr.rs ├── ring.rs ├── bin │ ├── humanizer.rs │ ├── change_sink.rs │ ├── slow_rust_tests.rs │ ├── print_todo.rs │ ├── templ.rs │ ├── dehn-polizei.rs │ ├── f.rs │ ├── netinfo.rs │ ├── clean_names.rs │ ├── headphone_battery.rs │ ├── polizei.rs │ ├── blur.rs │ └── staggered_backups.rs ├── exec.rs ├── fs.rs └── timer.rs ├── shell ├── screenshot ├── fix-perms ├── runtil_fail ├── mw ├── borg-backup ├── diary ├── dreams ├── hypr-has-window ├── hypr-workspace-has ├── sway-has-window ├── hddcheck ├── cleanup ├── sync-db ├── hotspot ├── hypr-close-special-workspace ├── reencode ├── git-diff-all ├── nuke_postgres ├── rotate ├── spoof ├── wscreenshot ├── hypr-keyboard-layout ├── sway-keyboard-layout ├── dim └── random_clips.py ├── hooks ├── post-merge └── post-rewrite ├── rustfmt.toml ├── .github ├── dependabot.yml └── workflows │ └── lint.yml ├── Justfile ├── install_global ├── install ├── Cargo.toml └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | temp 2 | other 3 | target 4 | -------------------------------------------------------------------------------- /src/pipewire/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod schema; 2 | pub mod sink; 3 | 4 | pub use schema::*; 5 | pub use sink::*; 6 | -------------------------------------------------------------------------------- /shell/screenshot: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | grim -g "$(slurp -o -r -c '#FF0000FF')" -t ppm - | satty --filename - 4 | -------------------------------------------------------------------------------- /hooks/post-merge: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # A git hook, which automatically deploys the scripts whenever we pull new files. 4 | ./install 5 | -------------------------------------------------------------------------------- /hooks/post-rewrite: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # A git hook, which automatically deploys the scripts whenever we pull new files. 
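# (Unlike post-merge, the post-rewrite hook fires after history rewrites such as `git rebase` or `git commit --amend`, so rewritten commits get redeployed as well.)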
4 | ./install 5 | -------------------------------------------------------------------------------- /shell/fix-perms: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | find -type d -exec chmod 755 {} \; 5 | find -type f -exec chmod 644 {} \; 6 | -------------------------------------------------------------------------------- /shell/runtil_fail: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | count=0 4 | echo "Run $count" 5 | ((++count)) 6 | 7 | while "$@"; do 8 | echo "Run $count" 9 | ((++count)) 10 | done 11 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | comment_width = 120 2 | format_code_in_doc_comments = true 3 | group_imports = "StdExternalCrate" 4 | imports_granularity = "Crate" 5 | imports_layout = "HorizontalVertical" 6 | reorder_imports = true 7 | wrap_comments = true 8 | -------------------------------------------------------------------------------- /shell/mw: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Rename a i3 workspace from $1 to $2 3 | 4 | if [ -z "$1" ]; then 5 | echo "Two arguments needed" 6 | elif [ -z "$2" ]; then 7 | echo "Two arguments needed" 8 | else 9 | command="rename workspace $1 to $2" 10 | echo $command 11 | i3-msg $command 12 | fi 13 | -------------------------------------------------------------------------------- /shell/borg-backup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | borg create --stats root@jarvis:/var/lib/backup/borg/documents::$(date -I) ~/Dokumente 4 | borg create --stats root@jarvis:/var/lib/backup/borg/images::$(date -I) ~/Syncthing/Bilder 5 | if [[ "bomb" == $(hostname) ]]; then 6 | borg create --stats root@jarvis:/var/lib/backup/borg/music::$(date -I) ~/music 7 | fi 8 | -------------------------------------------------------------------------------- /shell/diary: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | year=$(date +"%Y") 5 | dir=~/Dokumente/Various/Tagebuch/$year 6 | 7 | if [ -z "${1+x}" ]; then 8 | filename="$(date -d "now - 6 hours" +'%Y-%m-%d')" 9 | else 10 | filename="$(date -d "now - 6 hours" +'%Y-%m-%d')_${1}" 11 | fi 12 | 13 | mkdir -p "$dir" 14 | 15 | vim "$dir/$filename" 16 | -------------------------------------------------------------------------------- /shell/dreams: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | topic=$1 5 | year=$(date +"%Y") 6 | dir=~/Syncthing/Tagebuch/dreams/$year 7 | 8 | if [ -z $topic ]; then 9 | filename="$(date -d "now - 6 hours" +'%Y-%m-%d')" 10 | else 11 | filename="$(date -d "now - 6 hours" +'%Y-%m-%d')_$topic" 12 | fi 13 | 14 | mkdir -p $dir 15 | 16 | vim $dir/$filename 17 | -------------------------------------------------------------------------------- /shell/hypr-has-window: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Usage: ./check_window.sh "Window Name or Class" 3 | # Returns 1 if there's an open window matching the name or class, 0 otherwise. 
4 | set -euo pipefail 5 | 6 | TARGET="$1" 7 | 8 | hyprctl clients -j | jq -e --arg t "$TARGET" ' 9 | .[] | select((.title | test($t; "i")) or (.class | test($t; "i"))) 10 | ' >/dev/null 11 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: "/" 5 | schedule: 6 | interval: monthly 7 | groups: 8 | rust: 9 | patterns: 10 | - "*" 11 | - package-ecosystem: github-actions 12 | directory: "/" 13 | schedule: 14 | interval: monthly 15 | groups: 16 | actions: 17 | patterns: 18 | - "*" 19 | -------------------------------------------------------------------------------- /shell/hypr-workspace-has: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Usage: ./has_window_on_workspace.sh "workspace" "classname" 3 | # Returns 1 if there's a window with the given classname on the specified workspace, and 0 if there isn't. 4 | set -euo pipefail 5 | 6 | WORKSPACE="$1" 7 | CLASSNAME="$2" 8 | 9 | hyprctl clients -j | jq -e --arg w "special:$WORKSPACE" --arg c "$CLASSNAME" ' 10 | .[] | select(.workspace.name == $w and (.class | test($c; "i"))) 11 | ' >/dev/null 12 | -------------------------------------------------------------------------------- /shell/sway-has-window: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Usage: ./check_window.sh "Window Name or Class" 3 | # Returns 1 if there's an open window matching the name or class, 0 otherwise. 4 | 5 | set -euo pipefail 6 | 7 | TARGET="$1" 8 | 9 | swaymsg -t get_tree | jq -e --arg t "$TARGET" ' 10 | .. | objects 11 | | select(.window_properties?) 12 | | select( 13 | (.name // "") | test($t; "i") or 14 | (.class // "") | test($t; "i") 15 | ) 16 | ' >/dev/null 17 | -------------------------------------------------------------------------------- /src/table.rs: -------------------------------------------------------------------------------- 1 | use comfy_table::{ContentArrangement, Table, presets::UTF8_FULL_CONDENSED}; 2 | 3 | pub fn pretty_table() -> Table { 4 | let mut table = Table::new(); 5 | table.set_content_arrangement(ContentArrangement::Dynamic); 6 | table.load_preset(UTF8_FULL_CONDENSED); 7 | 8 | table 9 | } 10 | 11 | pub fn print_headline_table(header: String) { 12 | let mut table = pretty_table(); 13 | table.add_row(vec![header]); 14 | 15 | println!("{table}"); 16 | } 17 | -------------------------------------------------------------------------------- /shell/hddcheck: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Check all hard drives and display critical device stats. 
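# (Note: smartctl usually needs root privileges, so run this via sudo if the attributes come back empty.)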
3 | 4 | # Get all HDD devices 5 | devices=$(lsblk -d -o NAME -n) 6 | 7 | for device in $devices; do 8 | echo "Checking $device" 9 | smartctl -a "/dev/$device" | grep -e self-assessment \ 10 | -e Raw_Read_Error_Rate \ 11 | -e Reallocated_Sector_Ct \ 12 | -e Current_Pending_Sector \ 13 | -e Offline_Uncorrectable \ 14 | -e Power_On_Hours 15 | echo "" 16 | done 17 | -------------------------------------------------------------------------------- /src/i3status.rs: -------------------------------------------------------------------------------- 1 | use serde::Serialize; 2 | 3 | #[derive(Serialize, Default)] 4 | pub struct CustomBarStatus { 5 | pub text: String, 6 | #[serde(skip_serializing_if = "String::is_empty")] 7 | pub tooltip: String, 8 | #[serde(skip_serializing_if = "String::is_empty")] 9 | pub class: String, 10 | } 11 | 12 | impl CustomBarStatus { 13 | pub fn new(text: String) -> Self { 14 | Self { 15 | text, 16 | ..Default::default() 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /shell/cleanup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Cache stuff 3 | rm -rf ~/.ansible 4 | rm -rf ~/.pkg-cache 5 | rm -rf ~/.cmake 6 | rm -rf ~/.subversion 7 | rm -rf ~/.dotnet 8 | rm -rf ~/.mono 9 | rm -rf ~/.berkshelf 10 | 11 | # History stuff 12 | rm -rf ~/.wget-hsts 13 | rm -rf ~/.bash_history 14 | rm -rf ~/.python_history 15 | rm -rf ~/.putty 16 | 17 | # Java stuff 18 | rm -rf ~/.java 19 | rm -rf ~/.gradle 20 | rm -rf ~/.rye 21 | 22 | # Wtf 23 | rm -rf ~/.gnome 24 | rm -rf ~/.android 25 | 26 | # Desktop/File manager 27 | rm -rf ~/.local/share/Trash/* 28 | rm -rf ~/Desktop 29 | -------------------------------------------------------------------------------- /shell/sync-db: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # A script to sync a remote postgres database to the current machine 3 | set -euo pipefail 4 | host=$1 5 | database=$2 6 | 7 | echo 'Dumping DB on remote' 8 | ssh $host "pg_dump -O -F c ${database} > ${database}.dump" 9 | echo 'Sync DB' 10 | scp $host:$database.dump /tmp/ 11 | 12 | echo 'Drop and recreate DB' 13 | dropdb $database || true 14 | createdb $database 15 | 16 | echo 'Restoring DB' 17 | pg_restore -O -j 4 -F c -d $database /tmp/$database.dump 18 | 19 | echo 'Deleting dumps' 20 | rm /tmp/$database.dump 21 | ssh $host "rm ${database}.dump" 22 | echo 'Done' 23 | -------------------------------------------------------------------------------- /shell/hotspot: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | # Get the current Mac address 5 | ipLinkString=($(ip addr)) 6 | 7 | for element in "${ipLinkString[@]}"; do 8 | # Get name of the current ethernet interface 9 | if [[ $element == "en"* ]]; then 10 | idLength=$(expr length $element)-1 11 | eth_interface=${element:0:$idLength} 12 | # Get name of the current wlan interface 13 | elif [[ $element == "wlan"* ]]; then 14 | idLength=$(expr length $element)-1 15 | wlan_interface=${element:0:$idLength} 16 | fi 17 | done 18 | 19 | sudo create_ap $wlan_interface $eth_interface $1 $2 20 | -------------------------------------------------------------------------------- /shell/hypr-close-special-workspace: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Usage: ./close_special 3 | # Close special workspace on focused output if one is present 4 
| 5 | active=$(swaymsg -t get_outputs -r | jq -r '.[] | select(.focused==true).name') 6 | 7 | if [[ -n "$active" ]]; then 8 | # Assuming "special workspace" means a workspace named like "special:" 9 | special_ws=$(swaymsg -t get_workspaces -r | jq -r --arg output "$active" '.[] | select(.name | test("^special:" + $output + "$")) | .name') 10 | 11 | if [[ -n "$special_ws" ]]; then 12 | swaymsg workspace "$special_ws" 13 | swaymsg workspace back_and_forth 14 | fi 15 | fi 16 | -------------------------------------------------------------------------------- /shell/reencode: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Takes some file and reencodes it with sensible defaults to ~/ as mp4 with h.265 4 | 5 | set -euo pipefail 6 | 7 | original_path=$1 8 | filename=$(basename -- "$original_path") 9 | filename="${filename%.*}" 10 | destination_path=~/reencodes/$filename.mp4 11 | 12 | mkdir -p ~/reencodes 13 | 14 | nice -n 15 ffmpeg \ 15 | -i $original_path \ 16 | -map 0:v:0 \ 17 | -map 0:a:0 \ 18 | -sn \ 19 | -dn \ 20 | -c:a aac \ 21 | -b:a 128k \ 22 | -c:v libx265 \ 23 | -preset slower \ 24 | -x265-params \ 25 | -crf=19:pools=none \ 26 | -threads "$(nproc --all)" \ 27 | $destination_path 28 | -------------------------------------------------------------------------------- /shell/git-diff-all: -------------------------------------------------------------------------------- 1 | #!/bin/zsh 2 | # Show colored output for changed **AND** untracked files. 3 | file=$1 4 | output=$(git diff --color=always $file 2>/dev/null | delta) 5 | 6 | if [[ $output ]]; then 7 | echo $output 8 | elif [[ -f $file ]]; then 9 | git diff --color=always --no-index /dev/null $file | delta 10 | elif [[ $file ]]; then 11 | # Remove any old files 12 | # It's not easy to git diff a deleted file, as it doesn't have to be in-tree. 13 | # That's why we copy the last revision (HEAD) to our XDG runtime dir 14 | # so we can compare it with /dev/null. 15 | temp_path=$XDG_RUNTIME_DIR/git-diff-all 16 | rm $temp_path 17 | git show HEAD:./$file > $temp_path 18 | git diff --color=always --no-index $temp_path /dev/null | delta 19 | fi 20 | -------------------------------------------------------------------------------- /Justfile: -------------------------------------------------------------------------------- 1 | # If you change anything in here, make sure to also adjust the lint CI job! 2 | lint: 3 | cargo +nightly fmt --all -- --check 4 | taplo format --check 5 | cargo clippy --tests --workspace -- -D warnings 6 | 7 | format: 8 | just ensure-command taplo 9 | cargo +nightly fmt 10 | taplo format 11 | 12 | test: 13 | cargo test 14 | 15 | # Ensures that one or more required commands are installed 16 | ensure-command +command: 17 | #!/usr/bin/env bash 18 | set -euo pipefail 19 | 20 | read -r -a commands <<< "{{ command }}" 21 | 22 | for cmd in "${commands[@]}"; do 23 | if ! command -v "$cmd" > /dev/null 2>&1 ; then 24 | printf "Couldn't find required executable '%s'\n" "$cmd" >&2 25 | exit 1 26 | fi 27 | done 28 | -------------------------------------------------------------------------------- /shell/nuke_postgres: -------------------------------------------------------------------------------- 1 | #!/bin/env bash 2 | 3 | # Literally that, nuke the postgres data folder and create it anew 4 | 5 | read -p "Are you sure? (y/n): " choice 6 | case "$choice" in 7 | y | Y) echo ;; 8 | *) exit 1 ;; 9 | esac 10 | 11 | read -p "Like, are you sure you wanna nuke the DB? 
(YES/n): " choice 12 | case "$choice" in 13 | YES) echo ;; 14 | *) exit 1 ;; 15 | esac 16 | 17 | echo "Stopping postgres" 18 | sudo systemctl stop postgresql 19 | echo "Nuke and re-initialize postgres data folder" 20 | sudo rm -rf /var/lib/postgres/data 21 | sudo -u postgres initdb -D /var/lib/postgres/data --data-checksums --encoding=UTF8 22 | 23 | echo "Starting postgres" 24 | sudo systemctl start postgresql 25 | 26 | echo "Create Superuser nuke" 27 | sudo -u postgres createuser -s nuke 28 | echo "Create DB nuke" 29 | createdb nuke 30 | -------------------------------------------------------------------------------- /install_global: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Some best practice settings for bash scripts 3 | # set -e: automatically exits on any failing command 4 | # set -u: exits if there are any unset variables 5 | # set -o pipefail: automatically exits, if any command in a pipe fails (normally only the last is counted) 6 | set -euo pipefail 7 | 8 | cp hooks/* .git/hooks 9 | 10 | # Get absolute path this script's directory 11 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" 12 | 13 | BIN_FOLDER="/usr/local/bin" 14 | 15 | echo "Deploying shell scripts" 16 | for file in $DIR/shell/* ; do 17 | file_name=$(basename $file) 18 | sudo ln -f -s $file $BIN_FOLDER/$file_name 19 | done 20 | 21 | # Rust scripts 22 | echo "Installing rust scripts" 23 | cargo build --release 24 | cd target/release/ 25 | sudo fd . ./ --type x --maxdepth 1 --exec cp {} /usr/local/bin/ 26 | -------------------------------------------------------------------------------- /install: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Some best practice settings for bash scripts 3 | # set -e: automatically exits on any failing command 4 | # set -u: exits if there are any unset variables 5 | # set -o pipefail: automatically exits, if any command in a pipe fails (normally only the last is counted) 6 | set -euo pipefail 7 | 8 | cp hooks/* .git/hooks 9 | 10 | # Get absolute path this script's directory 11 | DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" 12 | 13 | BIN_FOLDER="$HOME/.bin" 14 | # Install all bash scripts 15 | mkdir -p "$BIN_FOLDER" 16 | 17 | echo "Deploying shell scripts" 18 | for file in $DIR/shell/*; do 19 | file_name=$(basename "$file") 20 | if [ ! -L "$HOME/.bin/${file_name}" ]; then 21 | ln -s "$file" "$BIN_FOLDER/$file_name" 22 | fi 23 | done 24 | 25 | # Rust scripts 26 | echo "Installing rust scripts" 27 | cargo install --path "$DIR" 28 | -------------------------------------------------------------------------------- /src/notify.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | 3 | use crate::exec::Cmd; 4 | 5 | /// Send an urgent notification to the notification daemon. 6 | pub fn critical_notify(display_time: usize, message: String) -> Result<()> { 7 | // Inform the user about the sink we just switched to. 8 | Cmd::new(format!( 9 | "notify-send --urgency=critical --expire-time={display_time} '{message}'", 10 | )) 11 | .run_success() 12 | .context("Failed to send notification.")?; 13 | 14 | Ok(()) 15 | } 16 | 17 | /// Send a notification to the notification daemon. 18 | pub fn notify(display_time: usize, message: String) -> Result<()> { 19 | // Inform the user about the sink we just switched to. 
20 | Cmd::new(format!( 21 | "notify-send --expire-time={display_time} '{message}'", 22 | )) 23 | .run_success() 24 | .context("Failed to send notification.")?; 25 | 26 | Ok(()) 27 | } 28 | -------------------------------------------------------------------------------- /src/logging.rs: -------------------------------------------------------------------------------- 1 | use simplelog::{Config, ConfigBuilder, LevelFilter, SimpleLogger}; 2 | 3 | /// Initialize the logger with the specified verbosity level. 4 | pub fn init_logger(level: u8) { 5 | let level = match level { 6 | 0 => LevelFilter::Error, 7 | 1 => LevelFilter::Warn, 8 | 2 => LevelFilter::Info, 9 | 3 => LevelFilter::Debug, 10 | _ => LevelFilter::Trace, 11 | }; 12 | 13 | // Try to initialize the logger with the timezone set to the Local time of the machine. 14 | let mut builder = ConfigBuilder::new(); 15 | let logger_config = match builder.set_time_offset_to_local() { 16 | Err(_) => { 17 | println!("Failed to determine the local time of this machine. Fallback to UTC."); 18 | Config::default() 19 | } 20 | Ok(builder) => builder.build(), 21 | }; 22 | 23 | SimpleLogger::init(level, logger_config).expect("Failed to init SimpleLogger"); 24 | } 25 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod exec; 2 | pub mod fs; 3 | pub mod i3status; 4 | pub mod ip_addr; 5 | pub mod logging; 6 | pub mod notify; 7 | pub mod pipewire; 8 | pub mod process; 9 | pub mod ring; 10 | pub mod table; 11 | pub mod timer; 12 | 13 | pub use anyhow::{Context, Result, anyhow, bail}; 14 | pub use fs::{FileType, get_newest_file, path_exists, read_dir_or_fail}; 15 | 16 | pub mod prelude { 17 | pub use super::{exec::*, fs::*}; 18 | } 19 | 20 | /// Generic setup function that will be called in all scripts 21 | pub fn setup() { 22 | // Beautify panics for better debug output. 23 | better_panic::install(); 24 | } 25 | 26 | pub fn sleep_seconds(seconds: u64) { 27 | std::thread::sleep(std::time::Duration::from_secs(seconds)); 28 | } 29 | 30 | #[macro_export] 31 | macro_rules! some_or_continue { 32 | ($res:expr) => { 33 | match $res { 34 | Some(val) => val, 35 | None => { 36 | continue; 37 | } 38 | } 39 | }; 40 | } 41 | -------------------------------------------------------------------------------- /shell/rotate: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Rotate: 3 | # 4 | # ffmpeg -i in.mov -vf "transpose=1" out.mov 5 | # 6 | # For the transpose parameter you can pass: 7 | # 8 | # 0 = 90CounterCLockwise and Vertical Flip (default) 9 | # 1 = 90Clockwise 10 | # 2 = 90CounterClockwise 11 | # 3 = 90Clockwise and Vertical Flip 12 | # 13 | # Use -vf "transpose=2,transpose=2" for 180 degrees. 14 | set -euo pipefail 15 | 16 | file=$2 17 | 18 | if [ "$1" == "left" ]; then 19 | direction="transpose=2" 20 | elif [ "$1" == "right" ]; then 21 | direction="transpose=1" 22 | elif [ "$1" == "down" ]; then 23 | direction="transpose=2,transpose=2" 24 | else 25 | echo "First argument must be either of [left, right down]" 26 | exit 1 27 | fi 28 | 29 | basename=$(basename -- "$file") 30 | extension="${basename##*.}" 31 | filename="${basename%.*}" 32 | 33 | temp_file="${filename}-temp.${extension}" 34 | 35 | rotate_command="ffmpeg -i $file -vf $direction $temp_file" 36 | 37 | eval $rotate_command 38 | 39 | if [ $? 
-eq 0 ]; then 40 | mv $temp_file $file 41 | else 42 | echo "Errored" 43 | fi 44 | -------------------------------------------------------------------------------- /shell/spoof: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | # Interface that should be spoofed 5 | interface=$1 6 | 7 | # Get the current Mac address 8 | ipLinkString=($(ip link show $interface)) 9 | 10 | count=0 11 | for element in "${ipLinkString[@]}"; do 12 | if [[ $element == "link/ether" ]]; then 13 | echo "Old Mac: ${ipLinkString[$count + 1]}" 14 | currentMac="${ipLinkString[$count + 1]}" 15 | break 16 | fi 17 | ((++count)) 18 | done 19 | 20 | # Generating new random mac, first 3 bytes stay the 21 | # same to ensure a correct vendor id 22 | firstMac=$(echo $currentMac | head -c 8) 23 | lastMac=$(dd bs=1 count=3 if=/dev/random 2>/dev/null | hexdump -v -e '/1 ":%02X"') 24 | mac=$firstMac$lastMac 25 | mac=${mac,,} 26 | 27 | # Stopping all networking and setting down interfaces 28 | sudo ip link set $interface down 29 | sudo ip link set dev $interface down 30 | 31 | # Changing mac address 32 | sudo ip link set dev $interface address $mac 33 | 34 | sudo ip link set dev $interface up 35 | sudo ip link set $interface up 36 | echo "New Mac: $mac" 37 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Code Style 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | paths: 7 | - ".github/workflows/lint.yml" 8 | - "**.rs" 9 | - "Cargo.toml" 10 | - "Cargo.lock" 11 | pull_request: 12 | branches: [main] 13 | paths: 14 | - ".github/workflows/lint.yml" 15 | - "**.rs" 16 | - "Cargo.toml" 17 | - "Cargo.lock" 18 | 19 | jobs: 20 | test: 21 | name: Tests on ${{ matrix.os }} for ${{ matrix.toolchain }} 22 | runs-on: ${{ matrix.os }} 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | os: [ubuntu-latest] 27 | 28 | steps: 29 | - name: Checkout code 30 | uses: actions/checkout@v6 31 | 32 | - name: Setup Rust toolchain 33 | uses: dtolnay/rust-toolchain@stable 34 | with: 35 | components: rustfmt, clippy 36 | 37 | - name: cargo build 38 | run: cargo build 39 | 40 | - name: cargo fmt 41 | run: cargo fmt 42 | 43 | - name: cargo clippy 44 | run: cargo clippy --tests -- -D warnings 45 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "script-utils" 3 | version = "0.1.0" 4 | authors = ["Arne Beer "] 5 | edition = "2024" 6 | 7 | [lib] 8 | name = "script_utils" 9 | path = "src/lib.rs" 10 | 11 | [profile.release] 12 | lto = "thin" 13 | 14 | [lints.clippy] 15 | # This one is having too many false-positives and it makes the code harder to read. 
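# (assigning_clones suggests `clone_from()`/`clone_into()` over plain `x = y.clone()` assignments.)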
16 | assigning_clones = "allow" 17 | 18 | [dependencies] 19 | anyhow = "1" 20 | better-panic = "0.3" 21 | chrono = { version = "0.4", features = ["now"] } 22 | clap = { version = "4", features = ["derive"] } 23 | dirs = "6" 24 | comfy-table = "7" 25 | image = { version = "0.25", features = [ 26 | "png", 27 | "jpeg", 28 | "webp", 29 | "rayon", 30 | ], default-features = false } 31 | log = "0.4" 32 | procfs = { version = "0.18", default-features = false } 33 | rayon = "1" 34 | regex = "1" 35 | serde = { version = "1", features = ["derive"] } 36 | serde_json = "1" 37 | serde_yaml = "0.9" 38 | shellexpand = "3" 39 | simplelog = { version = "0.12" } 40 | strum = { version = "0.27", features = ["derive"] } 41 | subprocess = "0.2" 42 | tera = "1" 43 | toml = "0.9" 44 | users = "0.11" 45 | 46 | [dev-dependencies] 47 | rstest = "0.26" 48 | -------------------------------------------------------------------------------- /src/process.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use procfs::process::all_processes; 3 | 4 | /// Get all cmdlines of currently running processes. 5 | pub fn get_process_cmdlines(current_user_id: u32) -> Result> { 6 | let processes = all_processes()? 7 | .filter_map(|process| process.ok()) 8 | // We're only interested in alive processes that belong to the current user. 9 | .filter(|process| { 10 | let uid = if let Ok(uid) = process.uid() { 11 | uid 12 | } else { 13 | return false; 14 | }; 15 | process.is_alive() && uid == current_user_id 16 | }) 17 | .filter_map(|process| { 18 | // Don't include the process if we cannot get the cmdline. 19 | if let Ok(cmdline) = process.cmdline() { 20 | // Only get the first few strings which should include the name of the game. 21 | if cmdline.len() < 6 { 22 | Some(cmdline.join(" ")) 23 | } else { 24 | let (left, _) = cmdline.split_at(5); 25 | Some(left.join(" ")) 26 | } 27 | } else { 28 | None 29 | } 30 | }) 31 | .collect(); 32 | 33 | Ok(processes) 34 | } 35 | -------------------------------------------------------------------------------- /shell/wscreenshot: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #1. Parameter: Sleep time 4 | #2. Parameter: Quality 5 | #3. Parameter: Workspace(s) 6 | set -euo pipefail 7 | 8 | #Example: wscreenshot 5 80 1,2,3 9 | 10 | resolution_x=2560 11 | resolution_y=1440 12 | 13 | sleeptime=${1:-5} 14 | quality=${2:-100} 15 | workspaces=$(echo $3 | tr "," "\n") 16 | 17 | allowed_spaces=() 18 | for space in $workspaces; do 19 | allowed_spaces+=('"current_workspace":"'$space'"') 20 | done 21 | 22 | echo "Quality: $quality" 23 | echo "Sleeping for $sleeptime seconds" 24 | 25 | while true; do 26 | output=$(i3-msg -t "get_outputs") 27 | lock_active=$(ps -aux | grep i3lock | wc -l) 28 | name=$(date +%Y-%m-%d-%H%M%S_${resolution_x}x${resolution_y}_scrot.png) 29 | 30 | on_screen=false 31 | for space in "${allowed_spaces[@]}"; do 32 | grep -q "$space" <<<"$output" 33 | if [[ $? 
-eq 0 ]]; then 34 | on_screen=true 35 | break 36 | fi 37 | done 38 | # Actually do the screenshot 39 | # it should be cropped later 40 | if [[ $lock_active -gt 1 ]]; then 41 | echo "i3 lock is active" 42 | elif [ "$on_screen" == false ]; then 43 | echo "Not on specified Workspaces" 44 | elif [ "$on_screen" == true ]; then 45 | scrot -q $quality $name 46 | fi 47 | sleep $sleeptime 48 | done 49 | -------------------------------------------------------------------------------- /src/ip_addr.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use serde::Deserialize; 3 | 4 | use crate::exec::Cmd; 5 | 6 | pub fn get_interfaces() -> Result> { 7 | let capture = Cmd::new("ip -j addr").run_success()?; 8 | let interfaces: Vec = serde_json::from_str(&capture.stdout_str())?; 9 | 10 | Ok(interfaces) 11 | } 12 | 13 | /// The entry struct for `ip -j addr` output. 14 | #[derive(Debug, Deserialize)] 15 | pub struct Interface { 16 | pub ifname: String, 17 | pub addr_info: Vec, 18 | // pub ifindex: usize, 19 | // pub flags: Vec, 20 | // pub mtu: usize, 21 | // pub qdisc: String, 22 | pub operstate: String, 23 | // pub group: Option, 24 | // pub txqlen: usize, 25 | // pub link_type: String, 26 | // pub address: Option, 27 | // pub broadcast: Option, 28 | } 29 | 30 | #[derive(Debug, Deserialize)] 31 | pub struct AddrInfo { 32 | pub family: String, 33 | pub local: String, 34 | // pub prefixlen: usize, 35 | // pub metric: Option, 36 | // pub broadcast: Option, 37 | // pub scope: String, 38 | // pub dynamic: Option, 39 | // pub label: Option, 40 | // pub valid_life_time: usize, 41 | // pub preferred_life_time: usize, 42 | } 43 | -------------------------------------------------------------------------------- /shell/hypr-keyboard-layout: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | language=$1 5 | languages="de eu state toggle" 6 | 7 | state_file="${XDG_RUNTIME_DIR}"/keyboard_layout 8 | 9 | if [[ ! " ${languages} " == *" $1 "* ]]; then 10 | echo "Invalid subcommand." 11 | echo "Valid subcommands are: $languages" 12 | exit 1 13 | fi 14 | 15 | if [ "${language}" == "state" ]; then 16 | if [ ! -f $state_file ]; then 17 | state="eu" 18 | else 19 | state="$(cat $state_file)" 20 | fi 21 | echo "{\"icon\":\"keyboard\", \"text\": \"${state}\"}" 22 | elif [ "${language}" == "de" ]; then 23 | hyprctl keyword input:kb_layout de >/dev/null 24 | hyprctl keyword input:kb_options caps:escape >/dev/null 25 | 26 | echo 'de' >$state_file 27 | elif [ "${language}" == "eu" ]; then 28 | hyprctl keyword input:kb_layout eu >/dev/null 29 | hyprctl keyword input:kb_model altg_weur >/dev/null 30 | hyprctl keyword \ 31 | input:kb_options \ 32 | caps:escape,lv3:ralt_switch,altwin:swap_lalt_lwin \ 33 | >/dev/null 34 | 35 | echo 'eu' >$state_file 36 | elif [ "${language}" == "toggle" ]; then 37 | # Toggle to the other language, depending on the current layout 38 | # If no state file exists, switch to de 39 | if [ ! 
-f $state_file ]; then 40 | hypr-keyboard-layout de 41 | elif [ "$(cat $state_file)" == "de" ]; then 42 | hypr-keyboard-layout eu 43 | else 44 | hypr-keyboard-layout de 45 | fi 46 | fi 47 | -------------------------------------------------------------------------------- /shell/sway-keyboard-layout: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | language=$1 5 | commands="de eu state toggle" 6 | 7 | state_file="${XDG_RUNTIME_DIR}/keyboard_layout" 8 | 9 | if [[ ! " ${commands} " == *" $1 "* ]]; then 10 | echo "Invalid subcommand." 11 | echo "Valid subcommands are: $commands" 12 | exit 1 13 | fi 14 | 15 | if [ "${language}" == "state" ]; then 16 | if [ ! -f "$state_file" ]; then 17 | state="eu" 18 | else 19 | state="$(cat "$state_file")" 20 | fi 21 | echo "{\"icon\":\"keyboard\", \"text\": \"${state}\"}" 22 | elif [ "${language}" == "de" ]; then 23 | >&2 echo "Running explicit de command" 24 | >&2 swaymsg input "type:keyboard" xkb_layout de >/dev/null 25 | >&2 swaymsg input "type:keyboard" xkb_options caps:escape >/dev/null 26 | 27 | echo 'de' >"$state_file" 28 | elif [ "${language}" == "eu" ]; then 29 | >&2 echo "Running explicit eu command" 30 | >&2 swaymsg input "type:keyboard" xkb_layout eu >/dev/null 31 | >&2 swaymsg input "type:keyboard" xkb_model altg_weur >/dev/null 32 | >&2 swaymsg input "type:keyboard" xkb_options caps:escape 33 | 34 | echo 'eu' >"$state_file" 35 | elif [ "${language}" == "toggle" ]; then 36 | >&2 echo "Running toggle command" 37 | if [ ! -f "$state_file" ]; then 38 | >&2 echo "No state file found" 39 | sway-keyboard-layout de 40 | elif [ "$(cat "$state_file")" == "de" ]; then 41 | >&2 echo "Current state language is de" 42 | sway-keyboard-layout eu 43 | else 44 | >&2 echo "Current state language is en" 45 | sway-keyboard-layout de 46 | fi 47 | fi 48 | -------------------------------------------------------------------------------- /src/pipewire/schema/node.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | /// Representation of a Pipewire device 4 | #[derive(Debug, Deserialize, Clone)] 5 | pub struct Node { 6 | pub id: usize, 7 | #[serde(rename = "type")] 8 | pub node_type: String, 9 | pub info: NodeInfo, 10 | } 11 | 12 | /// Detailed info about a device 13 | #[derive(Debug, Deserialize, Clone)] 14 | pub struct NodeInfo { 15 | pub props: NodeProps, 16 | pub state: String, 17 | } 18 | 19 | #[derive(Debug, Deserialize, Clone)] 20 | pub struct NodeProps { 21 | /// Info about the parent device this node belongs to 22 | #[serde(rename = "device.id")] 23 | pub device_id: usize, 24 | #[serde(rename = "device.api")] 25 | pub device_api: Option, 26 | #[serde(rename = "device.class")] 27 | pub device_class: Option, 28 | 29 | /// Info about the device profile this node belongs to 30 | #[serde(rename = "device.profile.description")] 31 | pub device_profile_description: Option, 32 | #[serde(rename = "device.profile.name")] 33 | pub device_profile_name: Option, 34 | 35 | /// Info about this very node 36 | #[serde(rename = "node.name")] 37 | pub node_name: String, 38 | #[serde(rename = "node.description")] 39 | pub node_description: String, 40 | #[serde(rename = "node.nick")] 41 | pub node_nick: Option, 42 | 43 | /// The object properties of this node. 
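/// (`object.id` is what `wpctl set-default` expects in `change_sink`, while `object.serial` is used for moving existing streams.)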
44 | #[serde(rename = "object.id")]
45 | pub object_id: usize,
46 | #[serde(rename = "object.path")]
47 | pub object_path: Option<String>,
48 | #[serde(rename = "object.serial")]
49 | pub object_serial: usize,
50 |
51 | /// The media info of this node
52 | #[serde(rename = "media.class")]
53 | pub media_class: String,
54 |
55 | /// The client this node belongs to
56 | #[serde(rename = "client.id")]
57 | pub client_id: usize,
58 | }
59 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Custom scripts
2 |
3 | A collection of custom Rust scripts for personal usage.
4 |
5 | Run `./install` to install everything. This deploys:
6 |
7 | - Rust scripts to `$CARGO_HOME/bin/`
8 | - Shell scripts to `~/.bin/`
9 |
10 | ## Dependencies
11 |
12 | - `rust` to compile the rust scripts
13 | - `scrot` for `bin/screenlock`
14 | - `iwconfig` for `netinfo` (pkg: `wireless_tools`)
15 | - `iw` for `netinfo`
16 | - `pw-dump` for `bin/fix_xonar_output` and `bin/change_sink`
17 |
18 | ## Installation
19 |
20 | ### Via Script
21 |
22 | There's the `./install` script, which does all of the work for you.
23 |
24 | 1. It copies all shell scripts into your `~/.bin` folder.
25 | 1. It compiles the rust scripts and copies them over to your `~/.bin` folder.
26 | 1. Make sure to add your `~/.bin` to your path.
27 |
28 | If you want to adjust the target directory (`~/.bin`), update the `BIN_FOLDER` variable in the `install` script.
29 |
30 | ### Manual installation
31 |
32 | For the shell scripts:
33 | - Just copy any script from the `./shell` folder you like to your target directory.
34 |
35 | For the rust code:
36 |
37 | Option 1:
38 |
39 | - Run `cargo install --path ./`
40 | - Add the `$CARGO_HOME/bin` to your `$PATH`, which by default is `~/.cargo/bin`.
41 |
42 | Option 2:
43 |
44 | - Run `cargo build --locked --release`
45 | - Copy the binaries you want from the `./target/release/` folder to your target directory.
46 |
47 | ## Git Hooks
48 |
49 | There are two hooks that automatically deploy the project when pulling new commits.
50 |
51 | Great for syncing changes between multiple machines.
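Both `./install` and `./install_global` enable them by copying the hooks into the local git directory:

```
cp hooks/* .git/hooks
```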
52 | 53 | ## Screenlock 54 | 55 | Screenlock trigger on sleep via a `systemd` service looks like this: 56 | 57 | ``` 58 | [Unit] 59 | Description=Lock the screen 60 | Before=sleep.target 61 | 62 | [Service] 63 | User=%i 64 | Group=%i 65 | Type=forking 66 | Environment=DISPLAY=:0 67 | ExecStart=/home/%i/.cache/cargo/bin/blur 5 -vvv 68 | 69 | [Install] 70 | WantedBy=sleep.target 71 | ``` 72 | -------------------------------------------------------------------------------- /src/pipewire/schema/device.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | /// Representation of a Pipewire device 4 | #[derive(Debug, Deserialize, Clone)] 5 | pub struct Device { 6 | pub id: usize, 7 | pub info: DeviceInfo, 8 | #[serde(rename = "type")] 9 | pub device_type: String, 10 | } 11 | 12 | /// Detailed info about a device 13 | #[derive(Debug, Deserialize, Clone)] 14 | pub struct DeviceInfo { 15 | pub props: DeviceProps, 16 | pub params: Params, 17 | } 18 | 19 | #[derive(Debug, Deserialize, Clone)] 20 | pub struct DeviceProps { 21 | #[serde(rename = "device.api")] 22 | pub device_api: String, 23 | #[serde(rename = "device.description")] 24 | pub device_description: String, 25 | #[serde(rename = "device.name")] 26 | pub device_name: String, 27 | 28 | /// The object properties of this device. 29 | #[serde(rename = "object.id")] 30 | pub object_id: usize, 31 | #[serde(rename = "object.path")] 32 | pub object_path: Option, 33 | #[serde(rename = "object.serial")] 34 | pub object_serial: usize, 35 | 36 | /// The media info of this node 37 | #[serde(rename = "media.class")] 38 | pub media_class: String, 39 | 40 | /// The client this device belongs to 41 | #[serde(rename = "client.id")] 42 | pub client_id: usize, 43 | } 44 | 45 | #[derive(Debug, Deserialize, Clone)] 46 | pub struct Params { 47 | #[serde(rename = "EnumProfile", default)] 48 | pub profiles: Vec, 49 | #[serde(rename = "EnumRoute", default)] 50 | pub routes: Vec, 51 | } 52 | 53 | /// A device can have multiple in-/outgoing routes. 54 | /// Each has their own profile 55 | /// 56 | /// This profile info contains some interesting data, such as, whether a cable is 57 | /// plugged in or not. 58 | #[derive(Debug, Deserialize, Clone)] 59 | pub struct Profile { 60 | pub index: usize, 61 | pub name: String, 62 | pub description: String, 63 | // "yes"|"no"|"unknown" 64 | pub available: String, 65 | } 66 | -------------------------------------------------------------------------------- /src/ring.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Result, bail}; 2 | 3 | #[derive(Debug)] 4 | pub struct Ring { 5 | cursor: usize, 6 | data: Vec, 7 | } 8 | 9 | /// Some structure with a static amount 10 | impl Ring { 11 | /// Initialize the ring with a static array of data. 12 | /// The array may not be empty. 13 | pub fn new(data: Vec) -> Result> { 14 | if data.is_empty() { 15 | bail!("Ring cannot work with an empty Vec"); 16 | } 17 | 18 | Ok(Ring { cursor: 0, data }) 19 | } 20 | 21 | /// Get the current entry in the ring. 22 | /// This panics if the ring is empty. 23 | pub fn get(&mut self) -> &T { 24 | &self.data[self.cursor] 25 | } 26 | 27 | /// Move the cursor to the next element and return the element. 28 | /// This panics if the ring is empty. 29 | #[allow(clippy::should_implement_trait)] 30 | pub fn next(&mut self) -> &T { 31 | // If we're at the end of the array, move to the start. 
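// e.g. for data `[a, b, c]` with the cursor on `c`, `next()` wraps around and yields `a` again.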
32 | if self.data.get(self.cursor + 1).is_none() { 33 | self.cursor = 0; 34 | } else { 35 | self.cursor += 1; 36 | } 37 | 38 | &self.data[self.cursor] 39 | } 40 | 41 | /// Move the cursor to the previous element and return the element. 42 | /// This panics if the ring is empty. 43 | pub fn prev(&mut self) -> &T { 44 | // If we're at the start of the array, move to the end. 45 | if self.cursor == 0 { 46 | self.cursor = self.data.len() - 1; 47 | } else { 48 | self.cursor -= 1; 49 | } 50 | 51 | &self.data[self.cursor] 52 | } 53 | 54 | /// Move the cursor to the first element that matches the given criteria. 55 | /// If none is found, do nothing and return `None`. 56 | pub fn find(&mut self, find: Filter) -> Option<&T> 57 | where 58 | Filter: Fn(&(usize, &T)) -> bool, 59 | { 60 | let (index, _) = self.data.iter().enumerate().find(find)?; 61 | 62 | self.cursor = index; 63 | Some(&self.data[self.cursor]) 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/bin/humanizer.rs: -------------------------------------------------------------------------------- 1 | //! A small helper binary to convert some raw values into human readable form. 2 | //! For now, this includes: 3 | //! 4 | //! - Seconds -> Datetime 5 | //! - Nanoseconds -> Datetime 6 | use anyhow::{Context, Result, bail}; 7 | use chrono::TimeDelta; 8 | use clap::Parser; 9 | 10 | #[derive(Parser, Debug)] 11 | #[clap( 12 | name = "Humanizer", 13 | about = "Bring your machine output into human readable form", 14 | author = "Arne Beer " 15 | )] 16 | pub struct CliArguments { 17 | #[clap(subcommand)] 18 | pub cmd: SubCommand, 19 | } 20 | 21 | #[derive(Parser, Debug)] 22 | pub enum SubCommand { 23 | /// Format something time related 24 | Time(Time), 25 | } 26 | 27 | #[derive(Parser, Debug)] 28 | pub struct Time { 29 | /// Convert nano seconds to human readable time 30 | #[clap(short, long)] 31 | pub nanos: Option, 32 | 33 | /// Convert nano seconds to human readable time 34 | #[clap(short, long)] 35 | pub seconds: Option, 36 | } 37 | 38 | fn main() -> Result<()> { 39 | // Parse commandline options. 40 | let args = CliArguments::parse(); 41 | 42 | match args.cmd { 43 | SubCommand::Time(time) => format_time(time), 44 | } 45 | } 46 | 47 | pub fn format_time(time: Time) -> Result<()> { 48 | let mut duration = if let Some(nanos) = time.nanos { 49 | TimeDelta::nanoseconds(nanos) 50 | } else if let Some(seconds) = time.seconds { 51 | TimeDelta::try_seconds(seconds).context("Failed to convert seconds.")? 
52 | } else { 53 | bail!("Either specify nanos or seconds"); 54 | }; 55 | 56 | let days = duration.num_days(); 57 | duration -= TimeDelta::try_days(days).context("Failed to convert days")?; 58 | 59 | let hours = duration.num_hours(); 60 | duration -= TimeDelta::try_hours(hours).context("Failed to convert hours")?; 61 | 62 | let minutes = duration.num_minutes(); 63 | duration -= TimeDelta::try_minutes(minutes).context("Failed to convert minutes")?; 64 | 65 | let seconds = duration.num_seconds(); 66 | 67 | let mut formatted = String::new(); 68 | if days > 0 { 69 | formatted.push_str(&format!("{days} days ")); 70 | } 71 | formatted.push_str(&format!("{hours:02}:{minutes:02}:{seconds:02}")); 72 | 73 | print!("{formatted}"); 74 | 75 | Ok(()) 76 | } 77 | -------------------------------------------------------------------------------- /shell/dim: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # hypr-dimmer.sh - Control screen dimming shader in Hyprland 4 | # 5 | # Usage: 6 | # hypr-dimmer.sh up # increase brightness one step 7 | # hypr-dimmer.sh down # decrease brightness one step 8 | # hypr-dimmer.sh set # set brightness directly (20–100) 9 | # hypr-dimmer.sh status # show current brightness 10 | # 11 | 12 | STATEFILE="${XDG_RUNTIME_DIR}/hypr-dimmer.state" 13 | SHADER_DIR="${HOME}/.config/hypr/shaders" 14 | 15 | # Allowed dim levels (20%..100%, step of 10) 16 | LEVELS=(2 5 10 20 30 40 50 60 70 80 90 100) 17 | 18 | # Read current level or default to 100 (shader off) 19 | read_state() { 20 | if [[ -f "$STATEFILE" ]]; then 21 | cat "$STATEFILE" 22 | else 23 | echo 100 24 | fi 25 | } 26 | 27 | # Save current level 28 | write_state() { 29 | echo "$1" >"$STATEFILE" 30 | } 31 | 32 | # Apply shader based on level 33 | apply_shader() { 34 | local level="$1" 35 | if [[ "$level" -eq 100 ]]; then 36 | # Disable shader 37 | hyprctl keyword decoration:screen_shader "" 38 | else 39 | hyprctl keyword decoration:screen_shader "${SHADER_DIR}/dim_${level}.frag" 40 | fi 41 | write_state "$level" 42 | } 43 | 44 | # Find next/previous level in LEVELS 45 | step_level() { 46 | local current="$1" dir="$2" 47 | for i in "${!LEVELS[@]}"; do 48 | if [[ "${LEVELS[$i]}" -eq "$current" ]]; then 49 | if [[ "$dir" == "up" && $i -lt $((${#LEVELS[@]} - 1)) ]]; then 50 | echo "${LEVELS[$((i + 1))]}" 51 | return 52 | elif [[ "$dir" == "down" && $i -gt 0 ]]; then 53 | echo "${LEVELS[$((i - 1))]}" 54 | return 55 | else 56 | echo "$current" 57 | return 58 | fi 59 | fi 60 | done 61 | echo "$current" # fallback 62 | } 63 | 64 | # --- Commands --- 65 | cmd="$1" 66 | case "$cmd" in 67 | up) 68 | current=$(read_state) 69 | next=$(step_level "$current" up) 70 | apply_shader "$next" 71 | ;; 72 | down) 73 | current=$(read_state) 74 | next=$(step_level "$current" down) 75 | apply_shader "$next" 76 | ;; 77 | set) 78 | val="$2" 79 | if [[ " ${LEVELS[*]} " == *" $val "* ]]; then 80 | apply_shader "$val" 81 | else 82 | echo "Invalid value. Use one of: ${LEVELS[*]}" 83 | exit 1 84 | fi 85 | ;; 86 | status) 87 | echo "Current dim level: $(read_state)%" 88 | ;; 89 | *) 90 | echo "Usage: dim {up|down|set <20-100>|status}" 91 | exit 1 92 | ;; 93 | esac 94 | -------------------------------------------------------------------------------- /src/exec.rs: -------------------------------------------------------------------------------- 1 | //! This is a convenience layer around [Subprocess's Exec](subprocess.Exec). 2 | //! It provides simple exit handling for single Commands. 3 | //! 
This doesn't have pipe support yet. 4 | use std::collections::HashMap; 5 | 6 | use anyhow::{Result, bail}; 7 | use shellexpand::tilde; 8 | use subprocess::{CaptureData, Exec, Redirection}; 9 | 10 | pub struct Cmd { 11 | cwd: Option, 12 | env: HashMap, 13 | command: String, 14 | } 15 | 16 | impl Cmd { 17 | /// Create a new wrapper with the command that should be executed. 18 | pub fn new(command: T) -> Cmd { 19 | Cmd { 20 | command: command.to_string(), 21 | env: HashMap::new(), 22 | cwd: None, 23 | } 24 | } 25 | 26 | /// Set the current working directory of the process. 27 | pub fn cwd(mut self, dir: T) -> Cmd { 28 | self.cwd = Some(dir.to_string()); 29 | 30 | self 31 | } 32 | 33 | /// Set the current working directory of the process. 34 | pub fn env(mut self, key: S, value: T) -> Cmd { 35 | self.env.insert(key.to_string(), value.to_string()); 36 | self 37 | } 38 | 39 | /// Run the command and return the exit status 40 | pub fn run(&self) -> Result { 41 | let mut exec = Exec::shell(&self.command) 42 | .stdout(Redirection::Pipe) 43 | .stderr(Redirection::Merge); 44 | 45 | // Set the current working directory. 46 | if let Some(cwd) = &self.cwd { 47 | exec = exec.cwd(tilde(&cwd).to_string()); 48 | } 49 | 50 | for (key, value) in self.env.iter() { 51 | exec = exec.env(key, value); 52 | } 53 | 54 | // Check if there are any critical errors. 55 | let capture = match exec.capture() { 56 | Ok(capture) => capture, 57 | Err(error) => { 58 | bail!( 59 | "Failed during: {} \nCritical error: {}", 60 | &self.command, 61 | error 62 | ); 63 | } 64 | }; 65 | 66 | Ok(capture) 67 | } 68 | 69 | /// A wrapper around `run` that also errors on non-zero exit statuses 70 | pub fn run_success(&self) -> Result { 71 | let capture = self.run()?; 72 | 73 | // Return an error on any non-1 exit codes 74 | if !capture.exit_status.success() { 75 | bail!( 76 | "Failed during: {}\nGot non-zero exit code: {:?}:\n{}", 77 | &self.command, 78 | capture.exit_status, 79 | capture.stdout_str(), 80 | ); 81 | } 82 | 83 | Ok(capture) 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /src/pipewire/schema/mod.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | pub use device::*; 3 | pub use node::*; 4 | use serde_json::Value; 5 | 6 | use crate::prelude::Cmd; 7 | 8 | pub mod device; 9 | pub mod node; 10 | 11 | /// Parse the output of `pw_dump` and return a list of devices and nodes. 12 | pub fn parse_pw_dump() -> Result<(Vec, Vec)> { 13 | let mut devices = Vec::new(); 14 | let mut nodes = Vec::new(); 15 | 16 | // First off, get the raw serde json representation. 17 | // There're many pipewire object types in the output we aren't interested in. 18 | // We're going to filter out only those we want. 19 | let capture = Cmd::new("pw-dump").run_success()?; 20 | let objects: Vec = serde_json::from_str(&capture.stdout_str())?; 21 | 22 | for object in objects { 23 | // The output should only contain objects. Everything else could be ignored anyway. 24 | let Value::Object(values) = &object else { 25 | continue; 26 | }; 27 | 28 | match values.get("type").unwrap() { 29 | Value::String(object_type) => match object_type.as_str() { 30 | "PipeWire:Interface:Node" => { 31 | // There are a few default drivers we're really not interested in. 32 | // We're only interested in those nodes that have a valid associated device. 33 | // Such nodes have a `node.info.props.device.id` property. 
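// A node we keep looks roughly like this in the dump (abbreviated, values are illustrative):
// { "type": "PipeWire:Interface:Node",
//   "info": { "props": { "device.id": 41, "media.class": "Audio/Sink", ... } } }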
34 | let Some(Value::Object(infos)) = values.get("info") else { 35 | continue; 36 | }; 37 | let Some(Value::Object(props)) = infos.get("props") else { 38 | continue; 39 | }; 40 | let Some(Value::Number(_device_id)) = props.get("device.id") else { 41 | continue; 42 | }; 43 | 44 | // On top of that, we must have a media_class, otherwise we cannot do anything 45 | // with it anyway. 46 | let Some(Value::String(media_class)) = props.get("media.class") else { 47 | continue; 48 | }; 49 | // Furthermore, we're only interested in audio sinks. 50 | if !media_class.starts_with("Audio") { 51 | continue; 52 | } 53 | 54 | // We now know that there's a device id in there, so let's include that node. 55 | let node: Node = serde_json::from_value(object.clone()) 56 | .context(format!("Failed to parse node: {:#?}", object.clone()))?; 57 | nodes.push(node); 58 | } 59 | "PipeWire:Interface:Device" => { 60 | let device: Device = serde_json::from_value(object.clone()) 61 | .context(format!("Failed to parse device: {:#?}", object.clone()))?; 62 | devices.push(device); 63 | } 64 | _ => continue, 65 | }, 66 | _ => continue, 67 | } 68 | } 69 | 70 | Ok((devices, nodes)) 71 | } 72 | -------------------------------------------------------------------------------- /src/bin/change_sink.rs: -------------------------------------------------------------------------------- 1 | //! Small convenience script to quickly change the output sink. 2 | //! It also moves all current outputs to that sink as well. 3 | //! 4 | //! This is currently used by me via shortcuts. 5 | //! Needed binaries: 6 | //! - pw-dump 7 | //! - pactl 8 | use anyhow::Result; 9 | use clap::{ArgAction, Parser}; 10 | use script_utils::{exec::Cmd, logging, notify::*, pipewire::*}; 11 | use strum::Display; 12 | 13 | #[derive(Parser, Debug)] 14 | #[clap( 15 | name = "change_sink", 16 | about = "Change the current sink to the specified device", 17 | author = "Arne Beer " 18 | )] 19 | struct CliArguments { 20 | /// Verbose mode (-v, -vv, -vvv) 21 | #[clap(short, long, action = ArgAction::Count)] 22 | pub verbose: u8, 23 | 24 | /// The command to execute. 25 | #[command(subcommand)] 26 | pub command: Command, 27 | } 28 | 29 | #[derive(Parser, Display, Clone, Debug, PartialEq)] 30 | pub enum Command { 31 | // Go to the next device 32 | Next, 33 | // Go to the previous device 34 | Previous, 35 | // Switch to the default built-in device. 36 | BuiltIn, 37 | // Switch to a specific target 38 | Target { target: String }, 39 | // List all devices 40 | List, 41 | } 42 | fn main() -> Result<()> { 43 | // Parse commandline options. 44 | let args = CliArguments::parse(); 45 | logging::init_logger(args.verbose); 46 | 47 | let device = match args.command { 48 | Command::Next => rotate_sink(Direction::Next)?, 49 | Command::Previous => rotate_sink(Direction::Previous)?, 50 | Command::BuiltIn => get_sinks()? 51 | .into_iter() 52 | .find(|device| device.info.props.node_description.starts_with("Built-in")), 53 | Command::Target { ref target } => get_sinks()? 54 | .into_iter() 55 | .find(|device| device.info.props.node_description.starts_with(target)), 56 | Command::List => { 57 | list_sinks()?; 58 | return Ok(()); 59 | } 60 | }; 61 | 62 | let Some(device) = device else { 63 | critical_notify( 64 | 1500, 65 | format!( 66 | "Could not find target sink for command: {:#?}", 67 | &args.command 68 | ), 69 | )?; 70 | 71 | return Ok(()); 72 | }; 73 | 74 | switch_device(&device)?; 75 | 76 | Ok(()) 77 | } 78 | 79 | /// Set the target device as the default sink. 
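/// (Under the hood this uses `wpctl set-default` with the node's object id.)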
80 | /// Also take all inputs that're currently open and move them over to the target device. 81 | /// This allows for a clean transition of any active streams when switching devices. 82 | fn switch_device(node: &Node) -> Result<()> { 83 | let props = &node.info.props; 84 | // Set the default sink. 85 | Cmd::new(format!("wpctl set-default {}", props.object_id)).run_success()?; 86 | 87 | move_inputs_to_sink(props.object_serial)?; 88 | 89 | // Inform the user about the sink we just switched to. 90 | notify(1500, format!("Changed sink to {}", props.node_description))?; 91 | 92 | Ok(()) 93 | } 94 | 95 | /// Get the list of all active sinks and print them to the commandline. 96 | fn list_sinks() -> Result<()> { 97 | let nodes = get_sinks()?; 98 | 99 | if nodes.is_empty() { 100 | println!("Found no sinks"); 101 | return Ok(()); 102 | } 103 | 104 | println!("Found the following sinks:"); 105 | for node in nodes.iter() { 106 | let props = &node.info.props; 107 | println!( 108 | "{}:\n \ 109 | Serial: {}\n \ 110 | Description: {}\n \ 111 | Name: {}\n \ 112 | ", 113 | props.object_id, props.object_serial, props.node_description, props.node_name, 114 | ); 115 | } 116 | 117 | Ok(()) 118 | } 119 | -------------------------------------------------------------------------------- /src/bin/slow_rust_tests.rs: -------------------------------------------------------------------------------- 1 | //! Get the output of the following command and sort them by execution time. 2 | //! ```sh 3 | //! cargo +nightly test -- 4 | //! --quiet \ 5 | //! -Z unstable-options \ 6 | //! --format json \ 7 | //! --report-time > target/debug/test.json 8 | //! ``` 9 | use std::path::PathBuf; 10 | 11 | use anyhow::{Context, Result}; 12 | use clap::{ArgAction, Parser}; 13 | use comfy_table::Table; 14 | use script_utils::logging; 15 | use serde::Deserialize; 16 | 17 | #[derive(Debug, Deserialize)] 18 | enum Event { 19 | #[serde(rename = "started")] 20 | Started, 21 | #[serde(rename = "ok")] 22 | Ok, 23 | } 24 | 25 | #[allow(unused)] 26 | #[derive(Debug, Deserialize)] 27 | struct TestReport { 28 | /// What kind of event triggered this report. 29 | event: Event, 30 | /// The name of either the suite or the name of the test. 31 | name: String, 32 | /// The execution time of this single test. 33 | exec_time: Option, 34 | } 35 | 36 | /// Main output for outpuft for 37 | #[allow(unused)] 38 | #[derive(Debug, Deserialize)] 39 | #[serde(tag = "type")] 40 | enum Report { 41 | /// Info about a full test suite. 42 | #[serde(rename = "suite")] 43 | Suite { 44 | /// What kind of event triggered this report. 45 | event: Event, 46 | /// The amount of tests that're filtered out. 47 | test_count: Option, 48 | /// The execution time of the full suite. 49 | exec_time: Option, 50 | /// The amount of tests that passed. 51 | passed: Option, 52 | /// The amount of ignored tests. 53 | ignored: Option, 54 | /// The amount of measured tests. 55 | measured: Option, 56 | /// The amount of failed tests. 57 | failed: Option, 58 | /// The amount of tests that're filtered out. 59 | filtered_out: Option, 60 | }, 61 | /// Info about an actual test. 62 | #[serde(rename = "test")] 63 | Test(TestReport), 64 | } 65 | 66 | #[derive(Parser, Debug)] 67 | #[clap( 68 | name = "Slow Rust Test Finder", 69 | about = "Sort and format a list of test execution time so we can easily find slow tests.", 70 | author = "Arne Beer " 71 | )] 72 | pub struct CliArguments { 73 | /// The path to the json test result file. 
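/// (e.g. the `target/debug/test.json` produced by the `cargo +nightly test` invocation shown at the top of this file.)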
74 | pub path: PathBuf, 75 | 76 | /// Verbose mode (-v, -vv, -vvv) 77 | #[clap(short, long, action = ArgAction::Count)] 78 | pub verbose: u8, 79 | 80 | /// Any tests below this value in 'ms' won't be shown in the resulting table. 81 | #[clap(short, long, default_value = "500")] 82 | pub threshold: usize, 83 | } 84 | 85 | /// Print a string, representing the current network state with IP. 86 | fn main() -> Result<()> { 87 | let args = CliArguments::parse(); 88 | logging::init_logger(args.verbose); 89 | 90 | let file = std::fs::read_to_string(&args.path).context("Failed to read test state file:")?; 91 | 92 | // Collect all reports of finished successful tests. 93 | let mut tests = Vec::new(); 94 | 95 | // Each line in this document is a full 96 | for line in file.lines() { 97 | let report: Report = 98 | serde_json::from_str(line).context(format!("Failed to parse line: {line}"))?; 99 | match report { 100 | Report::Suite { .. } => continue, 101 | Report::Test(test) => { 102 | // Don't add non-successful tests to the list. 103 | if !matches!(test.event, Event::Ok) { 104 | continue; 105 | } 106 | if let Some(exec_time) = test.exec_time { 107 | // Don't display tests that're below the minimum thresold. 108 | if args.threshold as f32 / 1000.0 > exec_time { 109 | continue; 110 | } 111 | tests.push(test); 112 | } 113 | } 114 | } 115 | } 116 | 117 | tests.sort_by(|a, b| a.exec_time.partial_cmp(&b.exec_time).unwrap()); 118 | 119 | let mut table = Table::new(); 120 | table.set_header(vec!["Exec time", "name"]); 121 | table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic); 122 | for test in tests { 123 | table.add_row(vec![ 124 | format!("{:.2}", test.exec_time.unwrap()), 125 | test.name.to_string(), 126 | ]); 127 | } 128 | 129 | println!("{table}"); 130 | 131 | Ok(()) 132 | } 133 | -------------------------------------------------------------------------------- /src/bin/print_todo.rs: -------------------------------------------------------------------------------- 1 | //! This script prints a minimal summary of my todo list. 2 | //! It's designed for use in a status bar. 3 | use std::{fs::read_to_string, path::PathBuf}; 4 | 5 | use anyhow::Result; 6 | use clap::Parser; 7 | use script_utils::{Context, i3status::CustomBarStatus}; 8 | use serde::Serialize; 9 | 10 | #[derive(Parser, Debug)] 11 | pub struct CliArguments { 12 | /// The path to the todo markdown file. 
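/// A `# Heading` line starts a new todo block; `- [ ]` / `- [x]` lines below it are
/// read as open/completed items (see `handle_todo_items`).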
13 | pub path: PathBuf, 14 | } 15 | 16 | #[derive(Serialize, Debug, Clone)] 17 | pub struct Todo { 18 | pub name: String, 19 | pub items: Vec, 20 | } 21 | 22 | impl Todo { 23 | pub fn new(name: String) -> Self { 24 | Self { 25 | name, 26 | items: Vec::new(), 27 | } 28 | } 29 | } 30 | 31 | #[derive(Serialize, Debug, Clone)] 32 | pub struct Item { 33 | pub name: String, 34 | pub completed: bool, 35 | } 36 | 37 | impl Item { 38 | pub fn new(name: String, completed: bool) -> Self { 39 | Self { name, completed } 40 | } 41 | } 42 | 43 | pub fn todos_as_i3bar_output(_todos: Vec) -> CustomBarStatus { 44 | let text = String::new(); 45 | 46 | CustomBarStatus::new(text) 47 | } 48 | 49 | pub fn todos_as_waybar_output(todos: Vec) -> CustomBarStatus { 50 | let mut text = String::new(); 51 | let mut tooltip = String::new(); 52 | 53 | let todo_count = todos.len(); 54 | if todo_count == 0 { 55 | text.push_str("Neat :3") 56 | } else { 57 | text = format!("{todo_count} todos") 58 | } 59 | 60 | for todo in todos { 61 | tooltip.push_str(" "); 62 | tooltip.push_str(&todo.name); 63 | for item in todo.items { 64 | tooltip.push('\r'); 65 | if item.completed { 66 | tooltip.push(''); 67 | } else { 68 | tooltip.push('󱘹'); 69 | } 70 | tooltip.push_str(&item.name); 71 | } 72 | tooltip.push('\r'); 73 | tooltip.push('\r'); 74 | } 75 | 76 | println!("{text}"); 77 | println!("{tooltip}"); 78 | 79 | let mut status = CustomBarStatus::new(text); 80 | status.tooltip = tooltip; 81 | 82 | status 83 | } 84 | 85 | /// Simply read a file and print a few lines of output 86 | fn main() -> Result<()> { 87 | // Parse commandline options. 88 | let args = CliArguments::parse(); 89 | 90 | if !args.path.exists() { 91 | println!("Nothing to do :)"); 92 | return Ok(()); 93 | } 94 | 95 | let content = read_to_string(args.path).context("Failed to read file")?; 96 | let todos = handle_todo_items(content); 97 | 98 | let output = todos_as_waybar_output(todos); 99 | 100 | // Send the expected json output to i3status 101 | println!("{}", serde_json::to_string(&output)?); 102 | 103 | Ok(()) 104 | } 105 | 106 | /// Go through all lines of a todo text and extract information from it. 107 | /// For example, the amount items that were completed. 108 | /// 109 | /// Retuns the next todo headline, we hit one. 110 | fn handle_todo_items(content: String) -> Vec { 111 | let mut todos = Vec::new(); 112 | 113 | let mut todo: Option = None; 114 | for line in content.lines() { 115 | let line = line.trim(); 116 | if line.starts_with('#') { 117 | // We found a new todo 118 | // If we already have one, save it to the list before starting a new one. 
119 | if let Some(todo) = &todo { 120 | todos.push(todo.clone()); 121 | } 122 | let name = line.strip_prefix('#').unwrap_or_default().trim(); 123 | todo = Some(Todo::new(name.into())); 124 | } else if let Some(ref mut todo) = todo { 125 | if line.starts_with('-') && !line.starts_with("- [x]") { 126 | let name = line 127 | .strip_prefix("- [ ]") 128 | .or(line.strip_prefix("- []")) 129 | .or(line.strip_prefix("-")) 130 | .unwrap(); 131 | todo.items.push(Item::new(name.to_string(), false)); 132 | } else if line.starts_with("- [x]") { 133 | let name = line 134 | .strip_prefix("- [x]") 135 | .or(line.strip_prefix("-[x]")) 136 | .unwrap(); 137 | todo.items.push(Item::new(name.to_string(), true)); 138 | } 139 | } 140 | } 141 | 142 | if let Some(todo) = todo { 143 | todos.push(todo); 144 | } 145 | 146 | todos 147 | } 148 | -------------------------------------------------------------------------------- /src/fs.rs: -------------------------------------------------------------------------------- 1 | //! All file system related helper functions. 2 | use std::{ 3 | fs::{DirEntry, read_to_string}, 4 | path::{Path, PathBuf}, 5 | time::SystemTime, 6 | }; 7 | 8 | use anyhow::{Context, Result}; 9 | pub use file::*; 10 | pub use path::*; 11 | use shellexpand::tilde; 12 | 13 | pub mod path { 14 | use super::*; 15 | /// Expand the tilde and return a valid PathBuf. 16 | pub fn expand(path: &Path) -> PathBuf { 17 | let path = tilde(&path.to_string_lossy()).to_string(); 18 | PathBuf::from(&path) 19 | } 20 | 21 | /// Check if a file exists. 22 | pub fn path_exists(path: T) -> bool { 23 | Path::new(&tilde(&path.to_string()).to_string()).exists() 24 | } 25 | } 26 | 27 | pub mod file { 28 | use super::*; 29 | 30 | pub enum FileType { 31 | Directory, 32 | File, 33 | } 34 | 35 | pub fn read_file_lines(path: &PathBuf) -> Result> { 36 | let content = read_file(path)?; 37 | Ok(content.split('\n').map(|name| name.to_string()).collect()) 38 | } 39 | 40 | /// Read the contents of a file. 41 | pub fn read_file(path: &PathBuf) -> Result { 42 | read_to_string(path).context(format!("Failed to read file {path:?}")) 43 | } 44 | 45 | /// Read the contents of a file. 46 | pub fn sort_and_write(mut strings: Vec, path: &PathBuf) -> Result<()> { 47 | strings.sort(); 48 | strings.retain(|name| !name.trim().is_empty()); 49 | std::fs::write(path, strings.join("\n")) 50 | .context(format!("Failed to write to file {path:?}")) 51 | } 52 | 53 | /// Read all entries of a directory and return them. 54 | /// If a FileType is specified, only files with that type will be returned. 55 | pub fn read_dir_or_fail(path: &PathBuf, file_type: Option) -> Result> { 56 | let dir = std::fs::read_dir(path)?; 57 | 58 | let mut entries: Vec = Vec::new(); 59 | for entry_result in dir { 60 | let entry = entry_result?; 61 | 62 | // Filter not matching file types 63 | if let Some(file_type) = &file_type { 64 | match file_type { 65 | FileType::Directory => { 66 | if !entry.file_type()?.is_dir() { 67 | continue; 68 | } 69 | } 70 | FileType::File => { 71 | if !entry.file_type()?.is_file() { 72 | continue; 73 | } 74 | } 75 | } 76 | } 77 | 78 | entries.push(entry); 79 | } 80 | 81 | Ok(entries) 82 | } 83 | 84 | /// Return the file with the newest 'modified' date in a directory. 
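/// Returns `Ok(None)` if the directory contains no entries.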
85 | pub fn get_newest_file(path: PathBuf) -> Result> { 86 | let dir = std::fs::read_dir(path)?; 87 | 88 | let mut path: Option = None; 89 | let mut modified = SystemTime::UNIX_EPOCH; 90 | 91 | for entry_result in dir { 92 | let entry = entry_result?; 93 | let metadata = entry.metadata()?; 94 | 95 | // We're looking at the first file. Use it as a base-line. 96 | if path.is_none() { 97 | path = Some(entry.path()); 98 | modified = metadata.modified()?; 99 | continue; 100 | } 101 | 102 | let last_modified = metadata.modified()?; 103 | if last_modified > modified { 104 | modified = last_modified; 105 | path = Some(entry.path()); 106 | } 107 | } 108 | 109 | Ok(path) 110 | } 111 | 112 | /// Walk through the file tree and search for **directory** leaves. 113 | pub fn find_leaf_dirs(path: PathBuf) -> Result> { 114 | let mut search_stack = Vec::new(); 115 | search_stack.push(path); 116 | let mut leaf_dirs = Vec::new(); 117 | 118 | while let Some(path) = search_stack.pop() { 119 | let dir = std::fs::read_dir(&path)?; 120 | // A flag whether this dir contains another dir. 121 | // If it doesn't we push it to the leaf_dirs after the for loop. 122 | let mut found_sub_dir = false; 123 | 124 | for entry_result in dir.into_iter() { 125 | let entry = entry_result?; 126 | 127 | let file_type = entry 128 | .file_type() 129 | .context(format!("Failed to read file type for {:?}", entry.path()))?; 130 | 131 | if file_type.is_dir() { 132 | found_sub_dir = true; 133 | search_stack.push(entry.path()); 134 | } 135 | } 136 | 137 | if !found_sub_dir { 138 | leaf_dirs.push(path); 139 | } 140 | } 141 | 142 | Ok(leaf_dirs) 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /src/bin/templ.rs: -------------------------------------------------------------------------------- 1 | //! Mini tool to quickly template any file. 2 | //! 3 | //! Currently uses Tera for templating, but might switch to `upon` soon. 4 | //! Takes a single template file and multiple files that specify variables. 5 | use std::{ 6 | collections::HashMap, 7 | fs::File, 8 | io::{BufReader, Write}, 9 | path::PathBuf, 10 | }; 11 | 12 | use anyhow::{Context, Result}; 13 | use chrono::{Datelike, TimeDelta}; 14 | use clap::{ArgAction, Parser}; 15 | use log::{debug, info}; 16 | use script_utils::{logging, prelude::*}; 17 | use serde_yaml::Value; 18 | use tera::{Context as TeraContext, Tera}; 19 | 20 | #[derive(Parser, Debug)] 21 | #[clap( 22 | name = "templ", 23 | about = "Apply variables to a template.", 24 | author = "Arne Beer " 25 | )] 26 | struct CliArguments { 27 | /// Verbose mode (-v, -vv, -vvv) 28 | #[clap(short, long, action = ArgAction::Count)] 29 | pub verbose: u8, 30 | 31 | /// The path to the template. 32 | pub template: PathBuf, 33 | 34 | /// Paths to the variable files (only YAML for now). 35 | /// Variables in Files that're passed in later will have precedence. 36 | pub variables: Vec, 37 | 38 | /// Where the output file will be written to. 39 | pub output: PathBuf, 40 | } 41 | 42 | fn main() -> Result<()> { 43 | // Parse commandline options. 44 | let args = CliArguments::parse(); 45 | 46 | logging::init_logger(args.verbose); 47 | 48 | // Read template file 49 | if !args.template.exists() { 50 | eprintln!("Couldn't find template file at path {:?}", args.template); 51 | } 52 | let template = read_file(&args.template).context("Failed to read template file")?; 53 | 54 | // Generate tera context and render the template. 
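// Minimal sketch of the expected inputs (hypothetical file names): a Tera template such as
// `letter.tera` referencing e.g. `{{ de.current_month }}`, plus one or more flat YAML files
// (`key: value`); keys from files passed later override earlier ones.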
55 | let context = create_context(&args).context("Failed to create Tera context")?; 56 | let rendered = Tera::one_off(&template, &context, false).context("Failed to render file.")?; 57 | 58 | info!("Rendered template:\n##########\n{rendered}\n##########"); 59 | 60 | // Write the template to disk. 61 | let mut file = File::create(&args.output) 62 | .context(format!("Failed to create file at: {:?}", &args.output))?; 63 | file.write_all(rendered.as_bytes()) 64 | .context("Failed to write output to file.")?; 65 | 66 | Ok(()) 67 | } 68 | 69 | fn create_context(args: &CliArguments) -> Result { 70 | let mut context = get_default_context()?; 71 | 72 | for file in args.variables.iter() { 73 | // Read the yaml file into a HashMap of [Value], which can 74 | // be easily consumed by the Tera context. 75 | let file = File::open(file).context(format!( 76 | "Failed to open template file at: {:?}", 77 | &args.variables 78 | ))?; 79 | let reader = BufReader::new(&file); 80 | let variables: HashMap = serde_yaml::from_reader(reader) 81 | .context(format!("Failed to read template file at: {file:?}"))?; 82 | 83 | // Merge all variables together 84 | variables.into_iter().for_each(|(key, value)| { 85 | context.insert(key, value); 86 | }); 87 | } 88 | 89 | let context = TeraContext::from_serialize(context).context("Failed to build tera context.")?; 90 | 91 | debug!("Variables: {:#?}", &context); 92 | 93 | Ok(context) 94 | } 95 | 96 | /// Build a default context for various circumstances 97 | fn get_default_context() -> Result> { 98 | let mut context: HashMap = HashMap::new(); 99 | let today = chrono::Local::now(); 100 | let start_of_month = today 101 | - TimeDelta::try_days(today.day0().into()) 102 | .context("Failed to create start of month time delta")?; 103 | let day_in_last_month = start_of_month - TimeDelta::try_days(10).unwrap(); 104 | 105 | // Add a few German values related to the current date. 
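// Worked example (hypothetical date): on 2024-03-17, `start_of_month` is 2024-03-01 and
// `day_in_last_month` is 2024-02-20, so `de.current_month` resolves to "März" and
// `de.last_month` to "Februar".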
106 | let mut de: HashMap = HashMap::new(); 107 | let german_months = [ 108 | "Januar", 109 | "Februar", 110 | "März", 111 | "April", 112 | "Mai", 113 | "Juni", 114 | "Juli", 115 | "August", 116 | "September", 117 | "Oktober", 118 | "November", 119 | "Dezember", 120 | ]; 121 | de.insert( 122 | "current_month".into(), 123 | serde_yaml::to_value(german_months[start_of_month.month0() as usize]).unwrap(), 124 | ); 125 | de.insert( 126 | "year_of_current_month".into(), 127 | serde_yaml::to_value(start_of_month.year()).unwrap(), 128 | ); 129 | de.insert( 130 | "last_month".into(), 131 | serde_yaml::to_value(german_months[day_in_last_month.month0() as usize]).unwrap(), 132 | ); 133 | de.insert( 134 | "year_of_last_month".into(), 135 | serde_yaml::to_value(day_in_last_month.year()).unwrap(), 136 | ); 137 | context.insert( 138 | "de".into(), 139 | serde_yaml::to_value(de).context("Couldn't serialize default values")?, 140 | ); 141 | 142 | Ok(context) 143 | } 144 | -------------------------------------------------------------------------------- /src/bin/dehn-polizei.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs::{File, remove_file}, 3 | path::PathBuf, 4 | }; 5 | 6 | use anyhow::{Context, Result, anyhow}; 7 | use clap::{ArgAction, Parser}; 8 | use dirs::runtime_dir; 9 | use log::info; 10 | use script_utils::{ 11 | exec::Cmd, 12 | logging, 13 | notify::*, 14 | sleep_seconds, 15 | timer::{Phase, PhaseTimer}, 16 | }; 17 | 18 | #[derive(Debug, Clone, PartialEq)] 19 | pub enum StretchAction { 20 | Initial { stretch_interval: usize }, 21 | Reminder { reminder_interval: usize }, 22 | Suspend, 23 | } 24 | 25 | #[derive(Parser, Debug)] 26 | #[clap( 27 | name = "Dehn-Polizei", 28 | about = "A little background daemon which notifies me that I should do some stretching", 29 | author = "Arne Beer " 30 | )] 31 | pub struct CliArguments { 32 | /// Verbose mode (-v, -vv, -vvv) 33 | #[clap(short, long, action = ArgAction::Count)] 34 | pub verbose: u8, 35 | 36 | #[clap(subcommand)] 37 | cmd: SubCommand, 38 | } 39 | 40 | #[derive(Parser, Debug)] 41 | pub enum SubCommand { 42 | /// Start the daemon. 43 | Start { 44 | /// The interval (in minutes) at which the user will be notified that they should stretch 45 | #[clap(short, long, default_value = "90")] 46 | interval: usize, 47 | 48 | /// The interval at which the user will be reminded if they didn't stretch yet. 49 | #[clap(short, long, default_value = "10")] 50 | reminder_interval: usize, 51 | }, 52 | 53 | /// Signal that you've stretched 54 | Ack {}, 55 | } 56 | 57 | fn main() -> Result<()> { 58 | // Parse commandline options. 59 | let args = CliArguments::parse(); 60 | logging::init_logger(args.verbose); 61 | 62 | match args.cmd { 63 | SubCommand::Start { 64 | interval, 65 | reminder_interval, 66 | } => start(interval, reminder_interval), 67 | SubCommand::Ack {} => { 68 | // Touch an ack file to indicate that the user has stretched. 69 | File::create(ack_file_path()?)?; 70 | Ok(()) 71 | } 72 | } 73 | } 74 | 75 | fn ack_file_path() -> Result { 76 | Ok(runtime_dir() 77 | .ok_or(anyhow!("Couldn't find runtime dir"))? 78 | .join("dehn-polizei-ack")) 79 | } 80 | 81 | fn start(stretch_interval: usize, reminder_interval: usize) -> Result<()> { 82 | info!( 83 | "\n 84 | User will be regularly notified every {stretch_interval} minutes. 
85 | They'll receive a follow-up notification every {reminder_interval} minutes\n", 86 | ); 87 | 88 | let phases = vec![ 89 | Phase::one_time( 90 | stretch_interval, 91 | StretchAction::Initial { stretch_interval }, 92 | ), 93 | Phase::recurring_delayed( 94 | stretch_interval, 95 | reminder_interval, 96 | StretchAction::Reminder { reminder_interval }, 97 | ), 98 | Phase::one_time(stretch_interval + 5400, StretchAction::Suspend), 99 | ]; 100 | let mut timer = PhaseTimer::new(phases); 101 | 102 | loop { 103 | sleep_seconds(60); 104 | 105 | // Search for the ack file, if it exists, the user has stretched. 106 | // Reset the timer and remove the file. 107 | if ack_file_path()?.exists() { 108 | remove_file(ack_file_path()?)?; 109 | timer.reset(); 110 | info!("Timer reset - user acknowledged stretch"); 111 | continue; 112 | } 113 | 114 | if let Some(action) = timer.check_with_sleep_detection() { 115 | match action { 116 | StretchAction::Initial { stretch_interval } => { 117 | info!("Sending initial stretch notification"); 118 | let message = format!( 119 | "You have been working for {stretch_interval} minutes.\nTime for a stretch\\!\\!", 120 | ); 121 | notify(20 * 1000, message)?; 122 | } 123 | StretchAction::Reminder { 124 | reminder_interval: _, 125 | } => { 126 | info!("Sending stretch reminder"); 127 | let overdue_minutes = timer.elapsed_minutes() - stretch_interval; 128 | let message = format!("You are {overdue_minutes} minutes overdue! Go stretch!"); 129 | critical_notify(40 * 1000, message)?; 130 | } 131 | StretchAction::Suspend => { 132 | info!("Force suspending"); 133 | let message = "Force suspending. Go stretch!".to_string(); 134 | critical_notify(60 * 1000, message)?; 135 | // Give the user two minutes to respond to this message. 136 | sleep_seconds(120); 137 | Cmd::new("sudo systemctl suspend") 138 | .run_success() 139 | .context("Failed to send notification.")?; 140 | } 141 | } 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /src/bin/f.rs: -------------------------------------------------------------------------------- 1 | //! A collection of helpful file system operations. 2 | //! 3 | //! - Get a list of all top-level git repositories 4 | use std::{fs::read_dir, path::PathBuf}; 5 | 6 | use anyhow::Result; 7 | use clap::{ArgAction, Parser}; 8 | use log::debug; 9 | use script_utils::logging; 10 | 11 | #[derive(Parser, Debug)] 12 | #[clap( 13 | name = "File helpers", 14 | about = "A collection of helpful file system operations.", 15 | author = "Arne Beer " 16 | )] 17 | pub struct CliArguments { 18 | /// Verbose mode (-v, -vv, -vvv) 19 | #[clap(short, long, action = ArgAction::Count)] 20 | pub verbose: u8, 21 | 22 | #[clap(subcommand)] 23 | pub cmd: SubCommand, 24 | } 25 | 26 | #[derive(Parser, Debug)] 27 | pub enum SubCommand { 28 | /// Find all git repos in a given directory 29 | FindRepos { 30 | paths: Vec, 31 | #[clap(short, long, default_value = "5")] 32 | max_depth: usize, 33 | #[clap(short, long)] 34 | short: bool, 35 | #[clap(short, long)] 36 | exclude: Vec, 37 | }, 38 | } 39 | 40 | fn main() -> Result<()> { 41 | // Parse commandline options. 42 | let args = CliArguments::parse(); 43 | logging::init_logger(args.verbose); 44 | 45 | match args.cmd { 46 | SubCommand::FindRepos { 47 | paths, 48 | max_depth, 49 | short, 50 | exclude, 51 | } => { 52 | // Find repos up to a depth of 5 directories. 
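// Hypothetical invocation: `f find-repos ~/code --max-depth 3 --short --exclude ~/code/vendor`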
53 | let mut repos = Vec::new(); 54 | for path in paths { 55 | discover_repos(&path, 0, max_depth, &exclude, &mut repos); 56 | } 57 | 58 | // Make sure we're always using the same order. 59 | repos.sort(); 60 | 61 | // Format the list of repos, so each repo is on a new line. 62 | let formatted = repos 63 | .into_iter() 64 | .map(|path| { 65 | // If the full path is requested, return it directly. 66 | if !short { 67 | return path.to_string_lossy().to_string(); 68 | } 69 | 70 | // Check if there's a filename, if not return th e full name. 71 | let Some(basename) = path.file_name() else { 72 | return path.to_string_lossy().to_string(); 73 | }; 74 | 75 | // Return the parent + file_name if possible. 76 | // Otherwise only return the file_name. 77 | let mut name = PathBuf::from(basename); 78 | if let Some(parent) = path.parent().and_then(|dir| dir.file_name()) { 79 | name = PathBuf::from(parent).join(basename); 80 | } 81 | name.to_string_lossy().to_string() 82 | }) 83 | .collect::>() 84 | .join("\n"); 85 | 86 | // Print the list 87 | println!("{formatted}") 88 | } 89 | } 90 | 91 | Ok(()) 92 | } 93 | 94 | /// Discover repositories inside a given folder. 95 | /// 96 | /// This function is copy-pasted from `geil`. 97 | /// If anything changes, consider backporting. 98 | pub fn discover_repos( 99 | path: &PathBuf, 100 | depths: usize, 101 | max_depth: usize, 102 | excluded_dir: &Vec, 103 | new_repos: &mut Vec, 104 | ) { 105 | // Check if this path is in the excluded paths. 106 | // If so, just return. 107 | for excluded in excluded_dir { 108 | if path.starts_with(excluded) { 109 | return; 110 | } 111 | } 112 | 113 | // Check if a .git directory exists. 114 | // If it does, always stop searching. 115 | let git_dir = path.join(".git"); 116 | debug!("{depths} Looking at folder {path:?}"); 117 | if git_dir.exists() { 118 | debug!("Found .git folder"); 119 | // Add the repository, if we don't know it yet. 120 | new_repos.push(path.to_owned()); 121 | return; 122 | } 123 | 124 | // Recursion stop. Only check up to a dephts of 5 125 | if depths == max_depth { 126 | debug!("Max depth reached"); 127 | return; 128 | } 129 | 130 | let current_dir = match read_dir(path) { 131 | Ok(current_dir) => current_dir, 132 | Err(err) => { 133 | debug!("Couldn't read directory at {path:?} with error: {err:?}"); 134 | return; 135 | } 136 | }; 137 | 138 | // The current path is no repository, search it's subdirectories 139 | for entry_result in current_dir { 140 | match entry_result { 141 | Ok(entry) => { 142 | let path = entry.path(); 143 | if !path.is_dir() { 144 | continue; 145 | } 146 | 147 | discover_repos(&path, depths + 1, max_depth, excluded_dir, new_repos); 148 | } 149 | Err(err) => { 150 | debug!("Couldn't read directory path {path:?} with error: {err:?}"); 151 | continue; 152 | } 153 | } 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /src/bin/netinfo.rs: -------------------------------------------------------------------------------- 1 | //! A tool for use in use with status bars. 2 | //! 3 | //! It displays information about currently available network devices such as: 4 | //! - IP Address 5 | //! - Type 6 | //! 
- Signal strength 7 | use anyhow::Result; 8 | use clap::{ArgAction, Parser}; 9 | use log::{debug, warn}; 10 | use regex::Regex; 11 | use script_utils::{exec::Cmd, ip_addr::*, logging}; 12 | 13 | enum NetworkType { 14 | Ethernet, 15 | Wlan, 16 | Vpn, 17 | } 18 | 19 | #[derive(Parser, Debug)] 20 | #[clap( 21 | name = "netinfo", 22 | about = "Get network info, formatted for a status bar", 23 | author = "Arne Beer " 24 | )] 25 | struct CliArguments { 26 | /// Verbose mode (-v, -vv, -vvv) 27 | #[clap(short, long, action = ArgAction::Count)] 28 | pub verbose: u8, 29 | } 30 | 31 | /// Print a string, representing the current network state with IP. 32 | fn main() -> Result<()> { 33 | // Parse commandline options. 34 | let args = CliArguments::parse(); 35 | logging::init_logger(args.verbose); 36 | 37 | let interfaces = get_interfaces()?; 38 | 39 | let mut output = Vec::new(); 40 | 41 | for interface in interfaces { 42 | // We aren't interested in the loopback device 43 | if interface.ifname == "lo" { 44 | continue; 45 | } 46 | 47 | // Device doesn't have an active connection. 48 | if interface.addr_info.is_empty() || interface.operstate == "DOWN" { 49 | continue; 50 | } 51 | 52 | debug!("Interface info: {interface:#?}"); 53 | 54 | // Try to find an ipv4 address by default. 55 | let addr = interface 56 | .addr_info 57 | .iter() 58 | .find(|addr| addr.family == "inet"); 59 | 60 | // Search for an ipv6 address as fallback. 61 | let addr = if let Some(addr) = addr { 62 | addr 63 | } else { 64 | let ipv6_addr = interface 65 | .addr_info 66 | .iter() 67 | .find(|addr| addr.family == "inet6"); 68 | 69 | match ipv6_addr { 70 | Some(addr) => addr, 71 | None => continue, 72 | } 73 | }; 74 | 75 | let name = interface.ifname; 76 | let ip_addr = &addr.local; 77 | 78 | // Drop any container/virtual environment related networks 79 | if name.starts_with("docker") || name.starts_with("veth") || name.starts_with("br") { 80 | continue; 81 | } 82 | 83 | // Determine the network type based on the name of the interface. 84 | let network_type = if name.starts_with('e') { 85 | NetworkType::Ethernet 86 | } else if name.starts_with('w') { 87 | NetworkType::Wlan 88 | } else { 89 | NetworkType::Vpn 90 | }; 91 | 92 | // Set the symbol for the current network type. 93 | let symbol = match network_type { 94 | NetworkType::Ethernet => "".into(), 95 | NetworkType::Wlan => format!(" {}", wifi_strength(&name)), 96 | NetworkType::Vpn => "".into(), 97 | }; 98 | 99 | output.push(format!("{symbol} {name}: {ip_addr}")); 100 | } 101 | 102 | if output.is_empty() { 103 | println!("No network"); 104 | } else { 105 | println!("{}", output.join(", ")); 106 | } 107 | 108 | Ok(()) 109 | } 110 | 111 | /// Determine the network strength of a given device. 112 | /// -30 dBm Maximum signal strength, you are probably standing right next to the access point / 113 | /// router. -50 dBm Anything down to this level can be regarded as excellent signal strength. 114 | /// -60 dBm This is still good, reliable signal strength. 115 | /// -67 dBm This is the minimum value for all services that require smooth and reliable data 116 | /// traffic. VoIP/VoWi-Fi Video streaming/streaming (not the highest quality) 117 | /// -70 dBm The signal is not very strong, but mostly sufficient. Web, email, and the like 118 | /// -80 dBm Minimum value required to make a connection. 119 | /// You cannot count on a reliable connection or sufficient signal strength to use services at this 120 | /// level. 
-90 dBm It is very unlikely that you will be able to connect or make use of any services 121 | /// with this signal strength. 122 | pub fn wifi_strength(interface: &str) -> &'static str { 123 | let capture_data = 124 | Cmd::new(format!("iw dev {interface} info | rg '^.*txpower.*'")).run_success(); 125 | // Return an wifi error symbol if the signal strength cannot be determined. 126 | let capture_data = match capture_data { 127 | Ok(capture) => capture, 128 | Err(err) => { 129 | warn!("Got error reading interface info: {err:#?}"); 130 | return ""; 131 | } 132 | }; 133 | 134 | let re = Regex::new(r"txpower (\d*)\.\d* dBm").unwrap(); 135 | 136 | let output = String::from_utf8_lossy(&capture_data.stdout); 137 | 138 | debug!("Iw output: {output:#?}"); 139 | let captures = match re.captures(output.trim()) { 140 | Some(captures) => captures, 141 | None => return "", 142 | }; 143 | 144 | let level: usize = match captures.get(1).unwrap().as_str().parse() { 145 | Ok(level) => level, 146 | Err(_) => return "", 147 | }; 148 | 149 | match level { 150 | 1..=30 => "▇", 151 | 51..=67 => "▅", 152 | 68..=70 => "▃", 153 | 71..=80 => "▁", 154 | 81..=90 => "!", 155 | _ => "!", 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /shell/random_clips.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | 3 | import subprocess 4 | import random 5 | import re 6 | import os 7 | import shutil 8 | import json 9 | import datetime 10 | 11 | 12 | def convert_duration_to_seconds(duration): 13 | """Convert duration in HH:MM:SS.xx format to seconds.""" 14 | hours, minutes, seconds, _ = map(float, re.split("[:.]", duration)) 15 | total_seconds = hours * 3600 + minutes * 60 + seconds 16 | return total_seconds 17 | 18 | 19 | def get_aspect_ratio(filename): 20 | cmd = [ 21 | "ffprobe", 22 | "-v", 23 | "error", 24 | "-select_streams", 25 | "v:0", 26 | "-show_entries", 27 | "stream=width,height", 28 | "-of", 29 | "json", 30 | filename, 31 | ] 32 | result = subprocess.run( 33 | cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True 34 | ) 35 | info = json.loads(result.stdout) 36 | 37 | if "streams" in info and len(info["streams"]) > 0: 38 | width = info["streams"][0]["width"] 39 | height = info["streams"][0]["height"] 40 | print(f"{width}/{height}") 41 | aspect_ratio = width / height 42 | return aspect_ratio 43 | return None 44 | 45 | 46 | def extract_random_clip(input_file, index): 47 | # Get the total duration of the video 48 | result = subprocess.run( 49 | ["ffmpeg", "-i", input_file], stderr=subprocess.PIPE, stdout=subprocess.PIPE 50 | ) 51 | output = result.stderr.decode("utf-8") 52 | 53 | # Extract the duration using regex 54 | duration_match = re.search(r"Duration: (\d{2}:\d{2}:\d{2}.\d{2})", output) 55 | if not duration_match: 56 | print("Could not extract duration from the video file.") 57 | return 58 | duration = duration_match.group(1) 59 | 60 | # Convert the duration to total seconds 61 | duration_seconds = convert_duration_to_seconds(duration) 62 | 63 | if duration_seconds < 420: 64 | print(f"Duration is shorter than 10min: {duration_seconds}\n: {input_file}") 65 | return 66 | 67 | # Calculate a random start time (excluding intro and outro) 68 | start_time = random.randint(60, int(duration_seconds) - 360) 69 | 70 | # Use ffmpeg to extract the clip 71 | file_name = f"{index}.mp4" 72 | output_file = f"random_clips_output/{file_name}" 73 | 74 | command = ["ffmpeg"] 75 | # Force reset of timestamps 76 | 
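# (`+genpts` regenerates presentation timestamps, so the re-encoded clips concatenate cleanly later on.)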
command += ["-fflags", "+genpts"] 77 | # Input file 78 | command += ["-i", input_file] 79 | # Duration of clip 80 | command += ["-ss", str(start_time)] 81 | # Start time of clip 82 | command += ["-t", "180"] 83 | # Video Codec 84 | command += ["-c:v", "libx265"] 85 | 86 | scale = "640:480" 87 | # - yadif is an adaptive deinterlacer 88 | # - Video Crop to 4:3 89 | # - Set resolution 90 | # aspect_ratio = get_aspect_ratio(input_file) 91 | # if aspect_ratio and aspect_ratio < (4 / 3): 92 | # # Only add cropping if the clip is wider than 4:3 93 | # command += ["-filter:v", f"yadif,crop=floor(ih/3)*4:ih,scale={scale}"] 94 | # else: 95 | command += ["-filter:v", f"yadif,scale={scale}"] # No cropping needed 96 | 97 | # Avoid pixel format issues 98 | command += ["-pix_fmt", "yuv420p"] 99 | # Video Framerate + ensure constant framerate 100 | command += ["-r", "24", "-fps_mode", "cfr"] 101 | # Audio Codec 102 | command += ["-c:a", "aac"] 103 | # Audio bitrate 104 | command += ["-b:a", "192k"] 105 | # Audio Sample rate 106 | command += ["-ar", "48000"] 107 | # Audio channels to Stereo 108 | command += ["-ac", "2"] 109 | # Output file 110 | command += [output_file] 111 | 112 | subprocess.run(command) 113 | 114 | print( 115 | f"Random 5-minute clip from {input_file}" 116 | + f"Extracted from {start_time} seconds in." 117 | + "Output saved as {output_file}." 118 | ) 119 | 120 | # Write the file name to the concat file so ffmpeg knows what to concat lateron. 121 | with open("random_clips_output/concat.txt", "a") as file: 122 | file.write(f"file {file_name}\n") 123 | 124 | 125 | def main_fn(): 126 | # Get the current directory 127 | current_directory = os.getcwd() 128 | 129 | if os.path.exists("random_clips_output"): 130 | shutil.rmtree("random_clips_output") 131 | if os.path.exists("output.mp4"): 132 | os.remove("output.mp4") 133 | os.makedirs("random_clips_output", exist_ok=True) 134 | 135 | # Walk through all directories and subdirectories and get all clips. 136 | mkv_files = [] 137 | for root, _, files in os.walk(f"{current_directory}/input"): 138 | for file in files: 139 | if file.endswith(".mkv") or file.endswith(".mp4"): 140 | # Get the full path of the .mkv file 141 | full_path = os.path.join(root, file) 142 | mkv_files.append(full_path) 143 | 144 | # Randomize the list 145 | random.shuffle(mkv_files) 146 | 147 | # Print the list of .mkv files with their full paths 148 | for index, file in enumerate(mkv_files): 149 | now = datetime.datetime.now() 150 | print(f"\n\n{now}: Encoding clip {index}") 151 | extract_random_clip(file, index) 152 | 153 | # Concat the files. 154 | subprocess.run( 155 | [ 156 | "ffmpeg", 157 | "-f", 158 | "concat", 159 | "-safe", 160 | "0", 161 | "-i", 162 | "random_clips_output/concat.txt", 163 | "-c", 164 | "copy", 165 | "output.mp4", 166 | ] 167 | ) 168 | 169 | 170 | if __name__ == "__main__": 171 | main_fn() 172 | -------------------------------------------------------------------------------- /src/bin/clean_names.rs: -------------------------------------------------------------------------------- 1 | //! Remove unwanted or unnecessary bits from filenames. 2 | //! This is mostly for use when working with files from Windows users. 3 | //! 4 | //! They somehow love to put "[some tag]", "{}", "-" and other stuff in their filenames. 
5 | use std::{env::current_dir, path::PathBuf}; 6 | 7 | use script_utils::*; 8 | 9 | fn main() -> Result<()> { 10 | setup(); 11 | 12 | let current_dir = current_dir()?; 13 | rename_directories(current_dir)?; 14 | 15 | Ok(()) 16 | } 17 | 18 | /// Remove all invalid characters and substrings from directories in the given directory. 19 | fn rename_directories(path: PathBuf) -> Result<()> { 20 | let dirs = read_dir_or_fail(&path, Some(FileType::Directory))?; 21 | 22 | for dir in dirs { 23 | let path = dir.path(); 24 | let filename = path 25 | .file_name() 26 | .ok_or_else(|| anyhow!(format!("Couldn't get filename from path: {path:?}")))?; 27 | let filename = filename 28 | .to_str() 29 | .ok_or_else(|| anyhow!(format!("Filename contains invalid utf8: {filename:?}")))?; 30 | 31 | let mut chars: Vec = filename.chars().collect(); 32 | // Check for each brace, if there is are matching pairs of braces in the path. 33 | // Everything between those braces will be removed. 34 | for (start, end) in get_braces() { 35 | // Search for pairs, until we no longer find some. 36 | loop { 37 | let mut start_index: Option = None; 38 | let mut end_index: Option = None; 39 | for (index, c) in chars.iter().enumerate() { 40 | if start_index.is_none() && *c == start { 41 | start_index = Some(index); 42 | } 43 | 44 | // We found an matching end brace. 45 | // Break the loop, remove the matching part of the name and start anew. 46 | if start_index.is_some() && *c == end { 47 | end_index = Some(index); 48 | break; 49 | } 50 | } 51 | 52 | // We couldn't find a matching pair. This is our exit condition. 53 | if start_index.is_none() || end_index.is_none() { 54 | break; 55 | } 56 | 57 | // Remove the subslice. 58 | chars.drain(start_index.unwrap()..end_index.unwrap()); 59 | } 60 | } 61 | 62 | // Get all indices of invalid characters. 63 | let mut chars_to_remove = Vec::new(); 64 | let invalid_characters = invalid_characters(); 65 | for (index, c) in chars.iter().enumerate() { 66 | if invalid_characters.contains(c) { 67 | chars_to_remove.push(index); 68 | } 69 | } 70 | 71 | // Remove all invalid char from the back to the front. 72 | // Needed to prevent invalid indices due to inded shifting on removal. 73 | chars_to_remove.reverse(); 74 | for c in chars_to_remove { 75 | chars.remove(c); 76 | } 77 | 78 | // Replace all unwanted characters with their replacement. 79 | for (target, replacement) in chars_to_replace() { 80 | chars = chars 81 | .iter() 82 | .map(|c| if *c == target { replacement } else { *c }) 83 | .collect(); 84 | } 85 | 86 | // Compile the modified character list into a new string. 87 | let mut new_name: String = chars.into_iter().collect(); 88 | 89 | // Remove trailing/preceeding whitespaces 90 | for c in trailing_chars() { 91 | while let Some(stripped) = new_name.strip_prefix(c) { 92 | new_name = stripped.to_owned(); 93 | } 94 | while let Some(stripped) = new_name.strip_suffix(c) { 95 | new_name = stripped.to_owned(); 96 | } 97 | } 98 | 99 | let mut new_path = path.clone(); 100 | new_path.set_file_name(&new_name); 101 | 102 | println!("Moving a) to b):\na) '{filename:?}'\nb) '{new_name:?}'\n"); 103 | std::fs::rename(path, new_path)?; 104 | } 105 | 106 | Ok(()) 107 | } 108 | 109 | fn get_braces() -> Vec<(char, char)> { 110 | vec![('[', ']'), ('(', ')'), ('{', '}')] 111 | } 112 | 113 | /// Return all chars that are considered invalid in our filename. 
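/// Currently that is `;` plus every opening/closing brace returned by `get_braces`.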
114 | fn invalid_characters() -> Vec { 115 | let mut chars = vec![';']; 116 | for (start, end) in get_braces() { 117 | chars.push(start); 118 | chars.push(end); 119 | } 120 | 121 | chars 122 | } 123 | 124 | /// Chars that should be replaced with another char. 125 | fn chars_to_replace() -> Vec<(char, char)> { 126 | vec![('~', '-')] 127 | } 128 | 129 | /// Trailing characters that should be removed entirely. 130 | fn trailing_chars() -> Vec { 131 | vec![' ', '\n', '\r'] 132 | } 133 | 134 | #[cfg(test)] 135 | mod test { 136 | use std::{ 137 | fs::{create_dir, remove_dir_all}, 138 | path::Path, 139 | }; 140 | 141 | use super::*; 142 | 143 | #[test] 144 | fn test_directory_cleanup() -> Result<()> { 145 | // Create test directory. 146 | let parent_dir = Path::new("/tmp/clean_names_test_dir"); 147 | create_dir(parent_dir)?; 148 | 149 | // Create a directory whose name should be cleaned. 150 | let inner_dir = parent_dir 151 | .join(" [this is some_test] Name that should stay;(and some) {more random} (stuff)"); 152 | create_dir(inner_dir)?; 153 | 154 | // Clean directory name and ensure it looks as expected. 155 | rename_directories(parent_dir.to_path_buf())?; 156 | assert!( 157 | Path::new("/tmp/clean_names_test_dir/Name that should stay").exists(), 158 | "The directory hasn' been correctly renamed" 159 | ); 160 | 161 | // Cleanup 162 | remove_dir_all(parent_dir)?; 163 | Ok(()) 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /src/bin/headphone_battery.rs: -------------------------------------------------------------------------------- 1 | //! Small helper script to get the battery status of my various wireless headphones. 2 | 3 | use anyhow::Result; 4 | use clap::{ArgAction, Parser}; 5 | use log::warn; 6 | use script_utils::{exec::Cmd, i3status::CustomBarStatus, logging}; 7 | 8 | #[derive(Parser, Debug)] 9 | #[clap( 10 | name = "headphone_battery", 11 | about = "Get the battery status of various headphones.", 12 | author = "Arne Beer " 13 | )] 14 | struct CliArguments { 15 | /// Verbose mode (-v, -vv, -vvv) 16 | #[clap(short, long, action = ArgAction::Count)] 17 | pub verbose: u8, 18 | } 19 | 20 | fn main() -> Result<()> { 21 | // Parse commandline options. 22 | let args = CliArguments::parse(); 23 | logging::init_logger(args.verbose); 24 | 25 | // Check headsetcontrol first 26 | let mut device_status = headsetcontrol(); 27 | 28 | // Check bluetoothctl next. 29 | if device_status == DeviceStatus::Unavailable { 30 | device_status = bluetoothctl(); 31 | } 32 | 33 | // If we got some headphone info, format and print it. 34 | let state = state_from_battery_status(&device_status); 35 | 36 | let inner_text = match device_status { 37 | DeviceStatus::Charging { percentage } => { 38 | if let Some(percentage) = percentage { 39 | format!("{percentage}% ") 40 | } else { 41 | "".to_string() 42 | } 43 | } 44 | DeviceStatus::Available { percentage } => format!("{percentage}%"), 45 | DeviceStatus::Unavailable => { 46 | // We didn't get any info, return an empty response. 
47 | println!("{}", serde_json::to_string(&CustomBarStatus::default())?); 48 | return Ok(()); 49 | } 50 | }; 51 | 52 | let text = format!("( {inner_text})"); 53 | let mut status = CustomBarStatus::new(text); 54 | status.class = state.into(); 55 | let json = serde_json::to_string(&status)?; 56 | println!("{json}"); 57 | 58 | Ok(()) 59 | } 60 | 61 | #[derive(PartialEq)] 62 | enum DeviceStatus { 63 | Charging { percentage: Option }, 64 | Available { percentage: usize }, 65 | Unavailable, 66 | } 67 | 68 | /// Determine the i3status state for this section. 69 | /// The color will change if the battery reaches certain states. 70 | fn state_from_battery_status(battery_status: &DeviceStatus) -> &str { 71 | match battery_status { 72 | DeviceStatus::Charging { .. } => "good", 73 | DeviceStatus::Available { percentage } => match percentage { 74 | 0..=15 => "critical", 75 | 16..=25 => "warning", 76 | 26..=35 => "normal", 77 | _ => "good", 78 | }, 79 | _ => "good", 80 | } 81 | } 82 | 83 | // First check `headsetcontrol`. 84 | // 85 | fn headsetcontrol() -> DeviceStatus { 86 | let result = Cmd::new("headsetcontrol --battery").run_success(); 87 | let output = match result { 88 | Ok(capture) => capture.stdout_str(), 89 | Err(err) => { 90 | warn!("Got error on headsetcontrol call:\n{err:#?}"); 91 | return DeviceStatus::Unavailable; 92 | } 93 | }; 94 | 95 | enum Availablility { 96 | Charging, 97 | Available, 98 | Unavailable, 99 | } 100 | 101 | // Output looks like this: 102 | // ``` 103 | // Found SteelSeries Arctis Nova 7 (Arctis Nova 7)! 104 | // 105 | // Battery: 106 | // Status: BATTERY_AVAILABLE 107 | // Level: 100% 108 | // ``` 109 | let mut availability = Availablility::Unavailable; 110 | for line in output.lines() { 111 | let line = line.trim(); 112 | if line.starts_with("Status:") { 113 | let parts: Vec = line.split(' ').map(|s| s.to_string()).collect(); 114 | let status_str = &parts[1]; 115 | 116 | availability = match status_str.as_str() { 117 | "BATTERY_CHARGING" => Availablility::Charging, 118 | "BATTERY_AVAILABLE" => Availablility::Available, 119 | _ => Availablility::Unavailable, 120 | }; 121 | } 122 | 123 | // Battery output of the command looks like this 124 | if line.starts_with("Level:") { 125 | let parts: Vec = line.split(' ').map(|s| s.to_string()).collect(); 126 | let battery = &parts[1]; 127 | 128 | // Remove the percentage sign 129 | let Ok(percentage) = battery.trim_end_matches('%').parse() else { 130 | warn!("Failed to parse battery value to usize: {battery}"); 131 | return DeviceStatus::Unavailable; 132 | }; 133 | 134 | let status = match availability { 135 | Availablility::Charging => DeviceStatus::Charging { 136 | percentage: Some(percentage), 137 | }, 138 | Availablility::Available => DeviceStatus::Available { percentage }, 139 | Availablility::Unavailable => DeviceStatus::Unavailable, 140 | }; 141 | 142 | return status; 143 | } 144 | } 145 | 146 | DeviceStatus::Unavailable 147 | } 148 | 149 | // First check `bluetoothctl`. 150 | fn bluetoothctl() -> DeviceStatus { 151 | let result = Cmd::new("bluetoothctl info").run_success(); 152 | let output = match result { 153 | Ok(capture) => capture.stdout_str(), 154 | Err(err) => { 155 | warn!("Got error on headsetcontrol call:\n{err:#?}"); 156 | return DeviceStatus::Unavailable; 157 | } 158 | }; 159 | 160 | // Output of the command looks like this: 161 | // ``` 162 | // ... 
163 | // RSSI: 0xffffffc2 (-62) 164 | // TxPower: 0xfffffff9 (-7) 165 | // Battery Percentage: 0x64 (100) 166 | // ``` 167 | for line in output.lines() { 168 | // Battery output of the command looks like this 169 | if line.trim().starts_with("Battery Percentage:") { 170 | // Split at `(` to get the last part: `100)` 171 | let parts: Vec = line.split('(').map(|s| s.to_string()).collect(); 172 | let battery = &parts[1]; 173 | 174 | // Remove the closing bracket 175 | let Ok(percentage) = battery.trim_end_matches(')').parse() else { 176 | warn!("Failed to parse battery value to usize: {battery}"); 177 | return DeviceStatus::Unavailable; 178 | }; 179 | 180 | return DeviceStatus::Available { percentage }; 181 | } 182 | } 183 | 184 | DeviceStatus::Unavailable 185 | } 186 | -------------------------------------------------------------------------------- /src/pipewire/sink.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result, bail}; 2 | use log::{debug, error, info, trace, warn}; 3 | use strum::Display; 4 | 5 | use super::{ 6 | Device, 7 | schema::{node::Node, parse_pw_dump}, 8 | }; 9 | use crate::{exec::Cmd, notify::*, ring::Ring}; 10 | 11 | // Some sinks are just uninteresting for me. 12 | const IGNORED_SINKS: &[&str] = &[ 13 | // Sony/Philips Digital Interface 14 | // Coaxial/Optical input 15 | "USB Audio S/PDIF Output", 16 | ]; 17 | 18 | /// Get a map of all audio sink noes. 19 | pub fn get_sinks() -> Result> { 20 | let (devices, nodes) = parse_pw_dump()?; 21 | 22 | let mut valid_nodes = Vec::new(); 23 | 24 | // Run through all devices and find the one we desire. 25 | for node in nodes.into_iter() { 26 | let props = &node.info.props; 27 | // We are only interested in devices of Audio/Sink type. 28 | if &props.media_class != "Audio/Sink" { 29 | continue; 30 | } 31 | 32 | // Skip all ignored sinks 33 | if IGNORED_SINKS.contains(&props.node_description.as_str()) { 34 | continue; 35 | } 36 | 37 | // Ignore any sinks where we can safely say that they aren't plugged in. 38 | if is_not_plugged_in(&node, &devices) { 39 | continue; 40 | } 41 | 42 | info!( 43 | "Found device {}: {} ({})", 44 | props.object_serial, props.node_description, props.node_name 45 | ); 46 | trace!("Raw: {node:#?}"); 47 | 48 | valid_nodes.push(node); 49 | } 50 | 51 | Ok(valid_nodes) 52 | } 53 | 54 | /// Check whether the physical connection for a node is actually plugged in or not. 55 | /// To check this, we have to search the associated device for the node, go through its profiles 56 | /// and get the profile for that node. 57 | /// 58 | /// The way we're currently doing this is by comparing the node's profile description 59 | /// and the actual profile description. 60 | /// However, this doesn't always work, as these descriptions seem to sometimes differ. 61 | /// 62 | /// We handle any error graceful and return `false`. I.e. if: 63 | /// - No device is found 64 | /// - No matching profile is found 65 | /// - The status of the profile is `unknown`. 66 | fn is_not_plugged_in(node: &Node, devices: &[Device]) -> bool { 67 | let device_id = &node.info.props.device_id; 68 | 69 | // Ensure that there's a device profile description and name. 70 | // Without this, we cannot check whether the node is actually plugged in. 71 | // 72 | // For now, this was only the case for devices like Bluetooth Headsets, 73 | // which are only present if connected. 
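// Hedged example of the matching logic below: a node whose device_profile_name is
// "analog-stereo" matches a profile/route with an identical description, or one whose
// name is "output:analog-stereo".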
74 | let Some(profile_description) = &node.info.props.device_profile_description else { 75 | return false; 76 | }; 77 | let Some(profile_name) = &node.info.props.device_profile_name else { 78 | return false; 79 | }; 80 | 81 | // Get the device 82 | let Some(device) = devices.iter().find(|device| device.id == *device_id) else { 83 | return false; 84 | }; 85 | 86 | // Go through all profiles 87 | for profile in &device.info.params.profiles { 88 | // There's a bit of inconsistency over here. 89 | // From what I've seen, there're several possible ways of finding the matching route. 90 | // 91 | // - The description matches perfectly 92 | // - The profile name matches the node's profile name prefixed with `output` 93 | if !(&profile.description == profile_description 94 | || profile.name == format!("output:{profile_name}")) 95 | { 96 | continue; 97 | } 98 | 99 | // If we found a matching route, check if it's not plugged in 100 | return profile.available == "no"; 101 | } 102 | 103 | // Check all routes. 104 | // Some profile-names seem to reference routes, which is pretty confusing 105 | for route in &device.info.params.routes { 106 | // There's a bit of inconsistency over here. 107 | // From what I've seen, there're several possible ways of finding the matching route. 108 | // 109 | // - The description matches perfectly 110 | // - The profile name matches the node's profile name prefixed with `output` 111 | if !(&route.description == profile_description 112 | || route.name == format!("output:{profile_name}")) 113 | { 114 | continue; 115 | } 116 | 117 | // If we found a matching route, check if it's not plugged in 118 | return route.available == "no"; 119 | } 120 | 121 | false 122 | } 123 | 124 | #[derive(Display)] 125 | pub enum Direction { 126 | Next, 127 | Previous, 128 | } 129 | 130 | /// Try to determine the id and description of the targeted sink. 131 | /// May return None if the target sink cannot be found. 132 | pub fn rotate_sink(direction: Direction) -> Result> { 133 | // Determine the current sink. 134 | let output = Cmd::new("pactl get-default-sink") 135 | .run_success() 136 | .context("Failed to find default sink")?; 137 | let current_sink_name = output.stdout_str().trim().to_owned(); 138 | debug!("Current sink name: {current_sink_name}"); 139 | 140 | // Initialize the device ring for easy iteration to the next/previous device. 141 | let nodes = get_sinks()?; 142 | let mut ring = Ring::new(nodes.clone())?; 143 | 144 | // Move the cursor to the current node. 145 | // If `None` is found, return an error as we cannot determine the current sink. 146 | let current_device = ring.find(|(_, node)| node.info.props.node_name == current_sink_name); 147 | if current_device.is_none() { 148 | error!("Could not determine current sink: {current_sink_name}"); 149 | critical_notify( 150 | 1500, 151 | format!("Could not determine current sink: {current_sink_name}"), 152 | )?; 153 | bail!("Failed to determine current sink"); 154 | } 155 | 156 | // Check if we find a node for the given name. 157 | let node = match direction { 158 | Direction::Next => Some((ring.next()).clone()), 159 | Direction::Previous => Some((ring.prev()).clone()), 160 | }; 161 | 162 | if let Some(ref node) = node { 163 | debug!( 164 | "{direction} sink name: {}", 165 | node.info.props.node_description 166 | ); 167 | } 168 | 169 | Ok(node.clone()) 170 | } 171 | 172 | /// Search all inputs and switch them over to the given device. 
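/// Internally this shells out to `pactl list short sink-inputs` and then runs
/// `pactl move-sink-input <input_id> <node_object_serial>` for each open input.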
173 | pub fn move_inputs_to_sink(node_object_serial: usize) -> Result<()> { 174 | // Get all currently active sink inputs. 175 | // Output format looks like this: 176 | // 177 | // 188 56 187 PipeWire float32le 2ch 48000Hz 178 | // 179 | // We're interested in the first number. 180 | let capture = Cmd::new("pactl list short sink-inputs").run_success()?; 181 | 182 | let input_ids: Vec = capture 183 | .stdout_str() 184 | .split('\n') 185 | .filter(|line| !line.trim().is_empty()) 186 | .filter_map(|line| line.split('\t').next().map(|id| id.to_string())) 187 | .collect(); 188 | 189 | debug!("Input Ids: {input_ids:?}"); 190 | 191 | for id in input_ids { 192 | let result = 193 | Cmd::new(format!("pactl move-sink-input {id} {node_object_serial}")).run_success(); 194 | if let Err(err) = result { 195 | warn!("Failed to switch input {id} to new sink: {err:?}"); 196 | }; 197 | } 198 | 199 | Ok(()) 200 | } 201 | -------------------------------------------------------------------------------- /src/bin/polizei.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::{HashMap, HashSet}, 3 | fs::{File, remove_file}, 4 | path::PathBuf, 5 | }; 6 | 7 | use anyhow::{Result, anyhow}; 8 | use clap::{ArgAction, Parser}; 9 | use dirs::runtime_dir; 10 | use log::{debug, info}; 11 | use script_utils::{ 12 | logging, 13 | notify::*, 14 | process::get_process_cmdlines, 15 | sleep_seconds, 16 | timer::{Phase, PhaseTimer}, 17 | }; 18 | 19 | #[derive(Parser, Debug)] 20 | #[clap( 21 | name = "Polizei", 22 | about = "A little background daemon which notifies the user if they play games for too long.", 23 | author = "Arne Beer " 24 | )] 25 | pub struct CliArguments { 26 | /// Verbose mode (-v, -vv, -vvv) 27 | #[clap(short, long, action = ArgAction::Count)] 28 | pub verbose: u8, 29 | 30 | #[clap(subcommand)] 31 | cmd: SubCommand, 32 | } 33 | 34 | #[derive(Parser, Debug)] 35 | pub enum SubCommand { 36 | /// Start the daemon. 37 | Start { 38 | /// The interval (in minutes) at which the user will be notified that they've 39 | /// been playing for a certain amount of time. 40 | #[clap(short, long, default_value = "60")] 41 | notification_interval: i64, 42 | 43 | /// The threshold at which the user will be notified to stop playing. 44 | #[clap(short, long, default_value = "120")] 45 | threshold: i64, 46 | 47 | /// The interval at which the user will be notified to stop playing. 48 | #[clap(short, long, default_value = "10")] 49 | stop_notification_interval: i64, 50 | }, 51 | 52 | /// Signal that you've acknowledged the gaming notification 53 | Ack {}, 54 | } 55 | 56 | // A mapping of the games to watch 57 | // 1. Names of the game. 58 | // 2. Substrings of the binary we should look for. 59 | // 3. Whether we should warn the user if the threshold was exceeded. 
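// (e.g. "Apex Legends" below carries `false`, so it is tracked but never triggers the stop notification.)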
60 | const GAME_LIST: &[(&str, &str, bool)] = &[ 61 | ("Oxygen Not Included", "OxygenNotIncluded", true), 62 | ("Factorio", "factorio", true), 63 | ("Noita", "noita", true), 64 | ("Apex Legends", "apex", false), 65 | ("Satisfactory", "satisfactory", true), 66 | ("Starsector", "starsector", true), 67 | ("Stardew Valley", "stardew", true), 68 | ("Terraria", "terraria", true), 69 | ("Necesse", "necesse", true), 70 | ("some game", "streaming_client", true), 71 | ("Minecraft", "atlauncher.jar", true), 72 | ("Zero Sievert", "zero sievert.exe", true), 73 | ]; 74 | 75 | #[derive(Debug, Clone, PartialEq)] 76 | pub enum GameAction { 77 | RegularNotification, 78 | StopNotification, 79 | } 80 | 81 | struct RunningGame { 82 | timer: PhaseTimer, 83 | } 84 | 85 | impl RunningGame { 86 | fn new( 87 | notification_interval: i64, 88 | threshold: i64, 89 | stop_notification_interval: i64, 90 | strict: bool, 91 | ) -> Self { 92 | let mut phases = vec![]; 93 | 94 | // Add regular notification phase (recurring from start if interval > 0) 95 | if notification_interval > 0 { 96 | phases.push(Phase::recurring( 97 | notification_interval as usize, 98 | notification_interval as usize, 99 | GameAction::RegularNotification, 100 | )); 101 | } 102 | 103 | // Add stop notification phase (recurring from threshold if strict) 104 | if strict && stop_notification_interval > 0 { 105 | phases.push(Phase::recurring( 106 | threshold as usize, 107 | stop_notification_interval as usize, 108 | GameAction::StopNotification, 109 | )); 110 | } 111 | 112 | Self { 113 | timer: PhaseTimer::new(phases), 114 | } 115 | } 116 | 117 | fn elapsed_minutes(&self) -> usize { 118 | self.timer.elapsed_minutes() 119 | } 120 | 121 | fn check(&mut self) -> Option { 122 | self.timer.check_with_sleep_detection() 123 | } 124 | } 125 | 126 | fn ack_file_path() -> Result { 127 | Ok(runtime_dir() 128 | .ok_or(anyhow!("Couldn't find runtime dir"))? 129 | .join("polizei-ack")) 130 | } 131 | 132 | fn main() -> Result<()> { 133 | // Parse commandline options. 134 | let args = CliArguments::parse(); 135 | logging::init_logger(args.verbose); 136 | 137 | match args.cmd { 138 | SubCommand::Start { 139 | notification_interval, 140 | threshold, 141 | stop_notification_interval, 142 | } => start(notification_interval, threshold, stop_notification_interval), 143 | SubCommand::Ack {} => { 144 | // Touch an ack file to indicate that the user has acknowledged the gaming notification. 145 | File::create(ack_file_path()?)?; 146 | Ok(()) 147 | } 148 | } 149 | } 150 | 151 | fn start( 152 | notification_interval: i64, 153 | threshold: i64, 154 | stop_notification_interval: i64, 155 | ) -> Result<()> { 156 | let mut running_games: HashMap<&'static str, RunningGame> = HashMap::new(); 157 | let current_user_id = users::get_current_uid(); 158 | info!( 159 | "\n 160 | User will be regularily notified every {} minutes. 161 | After {} minutes they'll be prompted to stop. 162 | From then on they'll receive a notification every {} minutes\n", 163 | notification_interval, threshold, stop_notification_interval, 164 | ); 165 | 166 | // Check every few minutes whether any games are up and running. 167 | // If they're running for the specified times, notify the user of this. 168 | // Get more annoying if they're running past the threshold. 169 | loop { 170 | let processes = get_process_cmdlines(current_user_id)?; 171 | 172 | // Search for the ack file, if it exists, the user has acknowledged the notification. 173 | // Reset all timers and remove the file. 
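// (The ack file is touched by the `Ack` subcommand, i.e. by running `polizei ack`.)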
174 | if ack_file_path()?.exists() { 175 | remove_file(ack_file_path()?)?; 176 | for game in running_games.values_mut() { 177 | game.timer.reset(); 178 | } 179 | info!("Timers reset - user acknowledged gaming notification"); 180 | } 181 | 182 | let mut found_games: HashSet<&'static str> = HashSet::new(); 183 | // Check all processes for the specified binaries. 184 | for cmdline in processes { 185 | debug!("Looking at process: {cmdline}"); 186 | for (name, binary, strict) in GAME_LIST { 187 | // If the cmdline doesn't contain the game's binary, skip to the next game. 188 | if !cmdline.to_lowercase().contains(binary) { 189 | continue; 190 | } 191 | 192 | info!("Found running game {name}"); 193 | found_games.insert(name); 194 | handle_running_game( 195 | notification_interval, 196 | threshold, 197 | stop_notification_interval, 198 | &mut running_games, 199 | name, 200 | *strict, 201 | )?; 202 | break; 203 | } 204 | } 205 | 206 | // Remove games that're no longer active. 207 | for key in running_games.keys().copied().collect::<Vec<_>>() { 208 | if !found_games.contains(key) { 209 | info!("{key} has been closed."); 210 | // Remove the game from the list of running games. 211 | running_games.remove(key); 212 | } 213 | } 214 | 215 | sleep_seconds(60); 216 | } 217 | } 218 | 219 | fn handle_running_game( 220 | notification_interval: i64, 221 | threshold: i64, 222 | stop_notification_interval: i64, 223 | running_games: &mut HashMap<&'static str, RunningGame>, 224 | name: &'static str, 225 | strict: bool, 226 | ) -> Result<()> { 227 | let running_game = running_games.entry(name).or_insert_with(|| { 228 | RunningGame::new( 229 | notification_interval, 230 | threshold, 231 | stop_notification_interval, 232 | strict, 233 | ) 234 | }); 235 | 236 | if let Some(action) = running_game.check() { 237 | let elapsed_minutes = running_game.elapsed_minutes() as i64; 238 | let time_string = format_duration(elapsed_minutes); 239 | 240 | match action { 241 | GameAction::RegularNotification => { 242 | info!("Sending normal notification for {name} at {time_string}"); 243 | notify( 244 | 10 * 1000, 245 | format!("You have been playing {name} for {time_string}"), 246 | )?; 247 | } 248 | GameAction::StopNotification => { 249 | info!("Sending stop notification for {name} at {time_string}"); 250 | critical_notify( 251 | 300 * 1000, 252 | format!("Stop playing {name}. You have been at it for {time_string}"), 253 | )?; 254 | } 255 | } 256 | } 257 | 258 | Ok(()) 259 | } 260 | 261 | fn format_duration(elapsed_minutes: i64) -> String { 262 | let minutes = elapsed_minutes % 60; 263 | let hours = elapsed_minutes / 60; 264 | 265 | if hours == 0 { 266 | format!("{minutes} Minutes") 267 | } else { 268 | format!("{hours} Hours and {minutes} Minutes") 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /src/bin/blur.rs: -------------------------------------------------------------------------------- 1 | //! Create a blurred image from the current screen. 2 | //! 3 | //! 1. Get a current screenshot via grim. 4 | //! 2. Run a custom point filter on the image data. 5 | //! 3. Save it.
6 | use std::{ 7 | fs::{File, read_to_string, remove_file}, 8 | path::Path, 9 | process::Command, 10 | time::Instant, 11 | }; 12 | 13 | use anyhow::{Context, Result}; 14 | use clap::{ArgAction, Parser}; 15 | use dirs::runtime_dir; 16 | use image::{ 17 | DynamicImage, 18 | ImageBuffer, 19 | ImageFormat, 20 | Pixel, 21 | Rgb, 22 | RgbImage, 23 | codecs::webp::WebPEncoder, 24 | load_from_memory_with_format, 25 | }; 26 | use log::debug; 27 | use rayon::{ 28 | iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}, 29 | slice::ParallelSliceMut, 30 | }; 31 | use script_utils::{bail, logging}; 32 | use serde::Deserialize; 33 | 34 | #[derive(Parser, Debug)] 35 | #[clap( 36 | name = "blur", 37 | about = "Make screenshots and blur them for use in a wayland lockscreen", 38 | author = "Arne Beer " 39 | )] 40 | struct CliArguments { 41 | /// The scale we should blur to. 42 | /// E.g. `5` would result in a relative 20% downscale. 43 | #[clap(default_value = "5")] 44 | pub scale: usize, 45 | 46 | /// Verbose mode (-v, -vv, -vvv) 47 | #[clap(short, long, action = ArgAction::Count)] 48 | pub verbose: u8, 49 | } 50 | 51 | #[derive(Debug, Deserialize)] 52 | struct Config { 53 | pub monitors: Vec<String>, 54 | } 55 | 56 | fn main() -> Result<()> { 57 | // Parse commandline options. 58 | let args = CliArguments::parse(); 59 | logging::init_logger(args.verbose); 60 | 61 | let config_path = dirs::config_dir().unwrap().join("blurlock.toml"); 62 | let config: Config = 63 | toml::from_str(&read_to_string(config_path).context("Failed to read config file")?) 64 | .context("Failed to deserialize config file")?; 65 | 66 | let runtime_dir = runtime_dir().context("Expected to find runtime dir.")?; 67 | 68 | // Parallelized screenshot creation for every known monitor in the list. 69 | config.monitors.par_iter().for_each(|monitor| { 70 | let image_buffer = get_screenshot(monitor).expect("Failed to get screenshot"); 71 | let mut image = load_image(image_buffer).expect("Failed to load screenshot image"); 72 | 73 | // Blur the image and write it to the file. 74 | image = blur_image(args.scale, image).expect("Failed to blur screenshot image"); 75 | 76 | write_image(&runtime_dir, monitor, image) 77 | .expect("Failed to write screenshot image to runtime dir"); 78 | }); 79 | 80 | Ok(()) 81 | } 82 | 83 | /// Make a screenshot via grim and capture the image (jpeg) bytes. 84 | fn get_screenshot(monitor: &str) -> Result<Vec<u8>> { 85 | let start = Instant::now(); 86 | let output = Command::new("grim") 87 | // JPEG with super low quality for speed 88 | .args(["-t", "jpeg", "-q", "20"]) 89 | // Specify the output monitor 90 | .arg("-o") 91 | .arg(monitor) 92 | // Write to stdout 93 | .arg("-") 94 | .output() 95 | .expect("failed to execute grim"); 96 | 97 | if !output.status.success() { 98 | bail!( 99 | "Failed to run grim command!\nstdout: {}\nstderr: {}", 100 | String::from_utf8_lossy(&output.stdout), 101 | String::from_utf8_lossy(&output.stderr), 102 | ) 103 | } 104 | 105 | debug!( 106 | "screenshot execution time: {}ms", 107 | start.elapsed().as_millis() 108 | ); 109 | 110 | Ok(output.stdout) 111 | } 112 | 113 | /// Initialize the image from the raw bytes.
114 | fn load_image(buffer: Vec<u8>) -> Result<ImageBuffer<Rgb<u8>, Vec<u8>>> { 115 | let start = Instant::now(); 116 | 117 | let image = load_from_memory_with_format(&buffer, ImageFormat::Jpeg)?; 118 | let image = match image { 119 | DynamicImage::ImageRgb8(image) => image, 120 | _ => bail!("Expected Rgb8 format"), 121 | }; 122 | 123 | debug!("Image init time: {}ms", start.elapsed().as_millis()); 124 | Ok(image) 125 | } 126 | 127 | /// Write the blurred image to the runtime dir as a lossless webp file. 128 | fn write_image( 129 | runtime_dir: &Path, 130 | monitor: &str, 131 | image: ImageBuffer<Rgb<u8>, Vec<u8>>, 132 | ) -> Result<()> { 133 | let start = Instant::now(); 134 | let path = runtime_dir.join(format!("{monitor}.webp")); 135 | if path.exists() { 136 | remove_file(&path).context("Failed to remove old wallpaper")?; 137 | } 138 | let mut file = File::create(&path).context("Failed to open wallpaper file")?; 139 | 140 | let encoder = WebPEncoder::new_lossless(&mut file); 141 | image.write_with_encoder(encoder)?; 142 | 143 | debug!("Writing file took {}ms", start.elapsed().as_millis()); 144 | 145 | Ok(()) 146 | } 147 | 148 | #[derive(Clone)] 149 | struct ImageSpecs { 150 | width: usize, 151 | channel_count: usize, 152 | scale: usize, 153 | } 154 | 155 | /// Blur the image. 156 | /// 157 | /// This is done by applying a point filter to (scale x scale) chunks. 158 | fn blur_image( 159 | scale: usize, 160 | image: ImageBuffer<Rgb<u8>, Vec<u8>>, 161 | ) -> Result<ImageBuffer<Rgb<u8>, Vec<u8>>> { 162 | let start = Instant::now(); 163 | 164 | let (width, height) = image.dimensions(); 165 | // Get the channel count (bytes per pixel). 166 | let channel_count = Rgb::<u8>::CHANNEL_COUNT as usize; 167 | // Convert the image into its raw bytes. 168 | let mut source_bytes = image.into_raw(); 169 | 170 | // Define the chunks based on the image width, bytes per pixel and scaling factor. 171 | // Each chunk thereby has `scale` rows as data. 172 | let chunk_size = width as usize * channel_count * scale; 173 | 174 | let mut target_bytes: Vec<u8> = vec![0; source_bytes.len()]; 175 | let target_chunks = target_bytes.par_chunks_mut(chunk_size); 176 | 177 | // We need additional info about the image dimensions and specs in the worker threads. 178 | // That's why we also zip a vector of these specs into the actual data iterator. 179 | let specs = ImageSpecs { 180 | width: width as usize, 181 | channel_count, 182 | scale, 183 | }; 184 | let spec_vec = vec![specs; height as usize / scale + 1]; 185 | 186 | source_bytes 187 | .par_chunks_mut(chunk_size) 188 | .zip(target_chunks) 189 | .zip(spec_vec.par_iter()) 190 | .for_each(blur_row_chunk); 191 | 192 | debug!("Image conversion time: {}ms", start.elapsed().as_millis()); 193 | 194 | RgbImage::from_raw(width, height, target_bytes) 195 | .context("Failed to create rgb image from target buffer") 196 | } 197 | 198 | /// Take a chunk of rows and pixelate them. 199 | /// The pixelation process is dependent on a scale factor. For instance, a scale factor 200 | /// of 3 will replace each 3x3 pixel chunk with the color of its center pixel. 201 | /// 202 | /// This is done like this: 203 | /// 204 | /// The following represents a 9x3 pixel matrix. 205 | /// Each number represents a color. 206 | /// ``` 207 | /// 1 2 3 4 5 6 7 8 9 208 | /// 9 8 7 6 5 4 3 2 1 209 | /// 7 7 7 8 8 8 9 9 9 210 | /// ``` 211 | /// 212 | /// At first, we only look at the middle row.
213 | /// ``` 214 | /// 9 8 7 6 5 4 3 2 1 215 | /// ``` 216 | /// 217 | /// 218 | /// Step 1: 219 | /// We then change the color of each 3-pixel-wide group to that of its center pixel: 220 | /// ``` 221 | /// 8 8 8 5 5 5 2 2 2 222 | /// ``` 223 | /// 224 | /// Step 2: 225 | /// The center row is then copied to the target buffer: 226 | /// ``` 227 | /// 8 8 8 5 5 5 2 2 2 228 | /// 8 8 8 5 5 5 2 2 2 229 | /// 8 8 8 5 5 5 2 2 2 230 | /// ``` 231 | fn blur_row_chunk(((source, target), specs): ((&mut [u8], &mut [u8]), &ImageSpecs)) { 232 | let channels = specs.channel_count; 233 | // Get the number of rows. 234 | let rows = source.len() / (specs.width * channels); 235 | let row_bytes = specs.width * channels; 236 | // Get the middle row (floored). 237 | let middle = rows / 2; 238 | 239 | // Calculate the start/end index of the middle row. 240 | let middle_row_start = middle * row_bytes; 241 | let middle_row_end = (middle + 1) * row_bytes; 242 | 243 | // Step 1: 244 | // Create an iterator through each pixel chunk of the middle row. 245 | let mut middle_pixel_iter = source 246 | .get_mut(middle_row_start..middle_row_end) 247 | .expect("Chunk size smaller than expected") 248 | .chunks_exact_mut(specs.scale * channels); 249 | 250 | // Calculate the indices for the middle pixel of each (full) pixel chunk. 251 | let middle_pixel_start = (specs.scale / 2) * channels; 252 | let middle_pixel_end = ((specs.scale / 2) + 1) * channels; 253 | #[allow(clippy::while_let_on_iterator)] 254 | while let Some(chunk) = middle_pixel_iter.next() { 255 | let middle_pixel = chunk 256 | .get_mut(middle_pixel_start..middle_pixel_end) 257 | .expect("Wrong middle pixel indices") 258 | .to_owned(); 259 | 260 | // Replace all pixels in the row with the middle pixel. 261 | for pixel in chunk.chunks_mut(3) { 262 | pixel.clone_from_slice(&middle_pixel); 263 | } 264 | } 265 | 266 | // For the remainder of the row, we just take the first pixel instead of the middle. 267 | // The remainder appears if the width isn't divisible by our `scale` factor. 268 | let remainder = middle_pixel_iter.into_remainder(); 269 | // Only copy stuff if there's more than one pixel. 270 | if remainder.len() > channels { 271 | let first_pixel = remainder.get(0..channels).unwrap().to_owned(); 272 | for pixel in remainder.chunks_mut(channels) { 273 | pixel.clone_from_slice(&first_pixel); 274 | } 275 | } 276 | 277 | // Step 2 278 | // Copy the finished middle row into all rows of the target chunk. 279 | let source_middle_row = source.get(middle_row_start..middle_row_end).unwrap(); 280 | for row in target.chunks_mut(row_bytes) { 281 | row.clone_from_slice(source_middle_row); 282 | } 283 | } 284 | -------------------------------------------------------------------------------- /src/bin/staggered_backups.rs: -------------------------------------------------------------------------------- 1 | //! A script used to implement staggered backups. 2 | //! 3 | //! It expects a folder that's full of files, each containing the timestamp of its creation in the 4 | //! filename. It then deletes all files except: 5 | //! - 1 file for each of the last 30 days 6 | //! - 1 file for each of the last 26 weeks 7 | //! - 1 file for each month of the last 2 years 8 | //! 9 | //! The file that's kept is always the oldest file that can be found for the given timespan. 10 | //! 11 | //! Example: 12 | //! The current date is 2025-04-02 13 | //! There're two files: 14 | //! - mydb_2025-04-01_10-00.dump 15 | //! - mydb_2025-04-01_23-00.dump 16 | //! 17 | //!
In this case, the second file will be deleted, as it's newer than the first one. 18 | use std::{ 19 | collections::BTreeMap, 20 | fs::{DirEntry, remove_file}, 21 | path::PathBuf, 22 | }; 23 | 24 | use anyhow::{Context, Result, bail}; 25 | use chrono::{Datelike, Days, Months, NaiveDate, NaiveDateTime, TimeDelta, Utc}; 26 | use clap::{ArgAction, Parser}; 27 | use log::error; 28 | use regex::Regex; 29 | use script_utils::{ 30 | FileType, 31 | fs::find_leaf_dirs, 32 | logging, 33 | read_dir_or_fail, 34 | table::{pretty_table, print_headline_table}, 35 | }; 36 | 37 | #[derive(Parser, Debug)] 38 | #[clap( 39 | name = "Staggered backups", 40 | about = "Execute in a directory to ", 41 | author = "Arne Beer " 42 | )] 43 | pub struct CliArguments { 44 | /// Verbose mode (-v, -vv, -vvv) 45 | #[clap(short, long, action = ArgAction::Count)] 46 | pub verbose: u8, 47 | 48 | /// The path that contains the backup files. 49 | pub path: PathBuf, 50 | 51 | /// Regex that extracts the matching `date_format` string from a filename. 52 | /// The default extracts 53 | /// "2025-04-02_00-00" from something like 54 | /// "some_game_name_2025-04-02_00-00.tar.zst" 55 | #[clap( 56 | short, 57 | long, 58 | default_value = r"[a-z_]*_([0-9]{4}-[0-9]{2}-[0-9]{2}_[0-9]{2}-[0-9]{2})\..*" 59 | )] 60 | pub date_extraction_regex: String, 61 | 62 | /// The date format string that's used in the filename 63 | /// E.g. "%Y-%m-%d_%H-%M" for "2025-04-02_00-00.dump" 64 | #[clap(short, long, default_value = "%Y-%m-%d_%H-%M")] 65 | pub date_format: String, 66 | 67 | /// Don't do any operations unless this flag is set 68 | #[clap(short, long)] 69 | pub execute: bool, 70 | 71 | /// If this is set, recursively search for folders with backups from the given path. 72 | /// This will run the staggered backups for each directory that is found. 73 | #[clap(short, long)] 74 | pub recursive: bool, 75 | } 76 | 77 | fn main() -> Result<()> { 78 | // Parse commandline options. 79 | let args = CliArguments::parse(); 80 | // Min log level INFO 81 | logging::init_logger(args.verbose + 2); 82 | 83 | if !args.execute { 84 | println!("--- DRY RUN MODE ---"); 85 | } 86 | if args.recursive { 87 | println!("WARNING: Running in recursive mode."); 88 | } 89 | println!(); 90 | 91 | if !args.recursive { 92 | run_staggered_backup(&args.path, &args)?; 93 | } else { 94 | let leaf_dirs = find_leaf_dirs(args.path.clone())?; 95 | let mut leaf_dirs_iter = leaf_dirs.iter().peekable(); 96 | while let Some(dir) = leaf_dirs_iter.next() { 97 | run_staggered_backup(dir, &args)?; 98 | if leaf_dirs_iter.peek().is_some() { 99 | println!("\n"); 100 | } 101 | } 102 | } 103 | 104 | Ok(()) 105 | } 106 | 107 | pub fn run_staggered_backup(path: &PathBuf, args: &CliArguments) -> Result<()> { 108 | let files = read_dir_or_fail(path, Some(FileType::File)).context("Failed to read files")?; 109 | let mut files_by_date = BTreeMap::new(); 110 | println!("═══════════════════════════════════════════════════════════════"); 111 | print_headline_table(format!("Checking folder: {path:?}")); 112 | // Go through all files and extract the datetime from its filename. 113 | for file in files { 114 | let name = file 115 | .path() 116 | .file_name() 117 | .context(format!("Got file without filename: {:?}", file.path()))? 
118 | .to_string_lossy() 119 | .to_string(); 120 | 121 | // Run the date extraction regex 122 | let re = Regex::new(&args.date_extraction_regex).context(format!( 123 | "Found invalid date_extraction_regex: {}", 124 | args.date_extraction_regex 125 | ))?; 126 | let Some(captures) = re.captures(&name) else { 127 | error!("Date extraction regex didn't match name. Ignoring file: {name}"); 128 | continue; 129 | }; 130 | 131 | let datetime = NaiveDateTime::parse_from_str(&captures[1], &args.date_format); 132 | let datetime = match datetime { 133 | Ok(datetime) => datetime, 134 | Err(_) => { 135 | error!("Failed to parse date string. Ignoring file: {name}"); 136 | continue; 137 | } 138 | }; 139 | 140 | files_by_date.insert(datetime, file); 141 | } 142 | if files_by_date.is_empty() { 143 | println!("No files for backup found."); 144 | return Ok(()); 145 | } 146 | 147 | let mut brackets = init_brackets()?; 148 | 149 | // Now we sort all entries into their brackets. 150 | // 151 | // The brackets are ordered in a way that the smaller brackets come first. 152 | // So even if there's some overlap, entries will be sorted into the smaller brackets 153 | // (i.e days instead of weeks). 154 | // 155 | // The backup files themselves are ordered from oldest to newest. 156 | // We now check for each bracket whether the newest backup matches the given bracket. 157 | // This is done until an entry is hit that is older than the current bracket. 158 | // In that case, we continue with the next bracket. 159 | for bracket in brackets.iter_mut() { 160 | 'inner: loop { 161 | { 162 | let entry = files_by_date.last_key_value(); 163 | // We hit the last entry, nothing to do. 164 | let Some((datetime, _)) = entry else { 165 | break; 166 | }; 167 | 168 | let start_of_bracket = &bracket.start_date; 169 | let end_of_bracket = start_of_bracket 170 | .checked_add_signed(TimeDelta::days(bracket.days.into())) 171 | .context("Couldn't calculate bracket length")?; 172 | 173 | // This entry is before the bracket, continue with the next one. 174 | let entry_date = datetime.date(); 175 | if entry_date < *start_of_bracket { 176 | break 'inner; 177 | } else if entry_date > end_of_bracket { 178 | bail!( 179 | "Encountered file that's somehow in the future for {} bracket ({:?} - {:?}):\n Entry date: {:?}", 180 | bracket.description, 181 | bracket.start_date, 182 | end_of_bracket, 183 | entry_date 184 | ) 185 | } 186 | } 187 | 188 | let (datetime, entry) = files_by_date.pop_last().unwrap(); 189 | bracket.entries.insert(datetime, entry); 190 | } 191 | } 192 | 193 | // Now delete all but the very first entry on each bracket. 194 | // So we keep 195 | // - One backup per day 196 | // - One backup per week 197 | // - One backup per month 198 | let mut final_entries = Vec::new(); 199 | println!("\nREMOVED FILES:"); 200 | let mut table = pretty_table(); 201 | table.set_header(vec!["bracket", "bracket start", "filename"]); 202 | for bracket in brackets.into_iter() { 203 | let mut entries_iter = bracket.entries.into_iter(); 204 | // Keep the very first entry. 
205 | if let Some((_, entry)) = entries_iter.next() { 206 | final_entries.push((entry, bracket.description, bracket.start_date)); 207 | } 208 | 209 | for (_, entry) in entries_iter { 210 | table.add_row(vec![ 211 | bracket.description.to_string(), 212 | format!("{:?}", bracket.start_date), 213 | entry.file_name().to_string_lossy().to_string(), 214 | ]); 215 | if args.execute { 216 | remove_file(entry.path()) 217 | .context(format!("Failed to remove file: {:?}", entry.path()))?; 218 | } 219 | } 220 | } 221 | println!("{table}"); 222 | 223 | println!("\nREMAINING FILES:"); 224 | let mut table = pretty_table(); 225 | table.set_header(vec!["bracket", "bracket start", "filename"]); 226 | for (entry, desc, date) in final_entries { 227 | table.add_row(vec![ 228 | desc.to_string(), 229 | format!("{date:?}"), 230 | entry.file_name().to_string_lossy().to_string(), 231 | ]); 232 | } 233 | println!("{table}"); 234 | 235 | Ok(()) 236 | } 237 | 238 | struct Bracket { 239 | pub start_date: NaiveDate, 240 | /// How many days the bracket encompasses. 241 | pub days: u32, 242 | pub description: &'static str, 243 | /// The sorted list of all entries that're in a given bracket. 244 | pub entries: BTreeMap<NaiveDateTime, DirEntry>, 245 | } 246 | 247 | impl Bracket { 248 | pub fn new(start_date: NaiveDate, days: u32, description: &'static str) -> Self { 249 | Self { 250 | start_date, 251 | days, 252 | description, 253 | entries: BTreeMap::new(), 254 | } 255 | } 256 | } 257 | 258 | // The number of days/weeks/months that should be tracked. 259 | // There's an overlap of these brackets. 260 | // For 30 days, 26 weeks and 24 months it would look roughly like this: 261 | // 30 daily brackets (smallest unit) 262 | // 26 - floor(30 / 7) = 22 weekly brackets 263 | // 24 - floor(26 * 7 / 30) = 18 monthly brackets 264 | const DAY_BRACKETS: u64 = 30; 265 | const WEEK_BRACKETS: u64 = 26; 266 | const MONTH_BRACKETS: u64 = 24; 267 | 268 | fn init_brackets() -> Result<Vec<Bracket>> { 269 | let mut brackets = Vec::new(); 270 | let mut last_daily_bracket = Utc::now().date_naive(); 271 | // Create daily brackets 272 | for _ in 0..DAY_BRACKETS { 273 | brackets.push(Bracket::new(last_daily_bracket, 0, "daily")); 274 | last_daily_bracket = last_daily_bracket 275 | .checked_sub_days(Days::new(1)) 276 | .context(format!( 277 | "Failed to go back one day from {last_daily_bracket:?}" 278 | ))?; 279 | } 280 | 281 | // Create weekly brackets for half a year. Start where the daily brackets end. 282 | let mut last_weekly_bracket = last_daily_bracket 283 | .checked_sub_days(Days::new( 284 | last_daily_bracket.weekday().num_days_from_monday().into(), 285 | )) 286 | .context("Failed to get start of week")?; 287 | 288 | let weekly_brackets = WEEK_BRACKETS - (DAY_BRACKETS as f64 / 7.0).floor() as u64; 289 | for _ in 0..weekly_brackets { 290 | brackets.push(Bracket::new(last_weekly_bracket, 6, "weekly")); 291 | last_weekly_bracket = last_weekly_bracket 292 | .checked_sub_days(Days::new(7)) 293 | .context("Failed to subtract several weeks back")?; 294 | } 295 | 296 | // Create monthly brackets for 24 months and start in the month the weekly brackets end. 297 | // This whole thing is a bit more involved as months differ in length.
298 | // We save the start of the month in each iteration and step back into the previous month from there. 299 | let mut start_of_month = last_weekly_bracket 300 | .checked_sub_days(Days::new(last_weekly_bracket.day0().into())) 301 | .context(format!( 302 | "Failed to get start of month for {last_weekly_bracket}" 303 | ))?; 304 | 305 | let monthly_brackets = MONTH_BRACKETS - (WEEK_BRACKETS as f64 * 7.0 / 30.0).floor() as u64; 306 | for _ in 0..monthly_brackets { 307 | // Go one month into the future and one day back to get the last day of the current month. 308 | let last_day_of_month = start_of_month 309 | .checked_add_months(Months::new(1)) 310 | .unwrap() 311 | .checked_sub_days(Days::new(1)) 312 | .unwrap(); 313 | 314 | brackets.push(Bracket::new( 315 | start_of_month, 316 | last_day_of_month.day0(), 317 | "monthly", 318 | )); 319 | 320 | // Set the start of the month to the previous month. 321 | let previous_month = start_of_month 322 | .checked_sub_days(Days::new(20)) 323 | .context(format!("Failed to subtract 20 days for {start_of_month}"))?; 324 | start_of_month = previous_month 325 | .checked_sub_days(Days::new(previous_month.day0().into())) 326 | .context(format!("Failed to get start of month for {previous_month}"))?; 327 | } 328 | 329 | Ok(brackets) 330 | } 331 | -------------------------------------------------------------------------------- /src/timer.rs: -------------------------------------------------------------------------------- 1 | //! Generic phase-based timer system for managing notifications at different intervals. 2 | //! 3 | //! This module provides a flexible timer that can handle multiple notification phases, 4 | //! each with different trigger times and behaviors (one-time or recurring). 5 | 6 | use std::{iter::Peekable, vec::IntoIter}; 7 | 8 | use chrono::{DateTime, Duration, Utc}; 9 | use log::info; 10 | 11 | /// Defines the behavior of a timer phase 12 | #[derive(Debug, Clone)] 13 | pub enum PhaseType { 14 | /// Phase triggers once at the specified time 15 | OneTime { triggered: bool }, 16 | /// Phase triggers repeatedly with the given interval after the initial trigger 17 | Recurring { 18 | interval: usize, 19 | /// The last time when this phase triggered. 20 | /// Measured in minutes from `PhaseTimer.start_time` 21 | last_action_minute: usize, 22 | /// If true, the phase won't trigger at the start time but waits for the first interval 23 | delayed: bool, 24 | }, 25 | } 26 | 27 | /// A phase in the timer system 28 | #[derive(Debug, Clone)] 29 | pub struct Phase<T> { 30 | /// How this phase behaves (one-time or recurring) 31 | pub phase_type: PhaseType, 32 | /// The action data associated with this phase 33 | /// 34 | /// This is generic so that the timer may be used in different contexts.
35 | pub action: T, 36 | /// The time (in minutes) when this phase becomes active 37 | pub trigger_at_minute: usize, 38 | } 39 | 40 | impl<T> Phase<T> { 41 | /// Create a one-time phase that triggers at the specified time 42 | pub fn one_time(trigger_time: usize, action: T) -> Self { 43 | Self { 44 | phase_type: PhaseType::OneTime { triggered: false }, 45 | action, 46 | trigger_at_minute: trigger_time, 47 | } 48 | } 49 | 50 | /// Create a recurring phase that triggers at the specified time and then repeats 51 | pub fn recurring(trigger_time: usize, interval: usize, action: T) -> Self { 52 | Self { 53 | phase_type: PhaseType::Recurring { 54 | interval, 55 | last_action_minute: 0, 56 | delayed: false, 57 | }, 58 | action, 59 | trigger_at_minute: trigger_time, 60 | } 61 | } 62 | 63 | /// Create a delayed recurring phase that waits for the first interval before triggering 64 | pub fn recurring_delayed(trigger_time: usize, interval: usize, action: T) -> Self { 65 | Self { 66 | phase_type: PhaseType::Recurring { 67 | interval, 68 | last_action_minute: 0, 69 | delayed: true, 70 | }, 71 | action, 72 | trigger_at_minute: trigger_time, 73 | } 74 | } 75 | } 76 | 77 | /// A generic timer that can manage multiple successive phases with different behaviors. 78 | /// 79 | /// The idea is to allow patterns like this: 80 | /// - Do nothing for 90 minutes 81 | /// - Then notify 2 times in 30 min intervals 82 | /// - Then notify every 10 minutes until reset 83 | /// 84 | /// There's always only a single phase active, which is the phase with the highest `trigger_at_minute` that has already been reached. 85 | #[derive(Debug, Clone)] 86 | pub struct PhaseTimer<T> { 87 | original_phases: Vec<Phase<T>>, 88 | phases: Peekable<IntoIter<Phase<T>>>, 89 | current_phase: Phase<T>, 90 | start_time: DateTime<Utc>, 91 | last_check_time: Option<DateTime<Utc>>, 92 | } 93 | 94 | impl<T: Clone> PhaseTimer<T> { 95 | /// Create a new phase timer with the given phases 96 | pub fn new(mut phases: Vec<Phase<T>>) -> Self { 97 | // Sort phases by trigger time to ensure the correct order. 98 | phases.sort_by_key(|phase| phase.trigger_at_minute); 99 | 100 | // Make a copy of the phases in case of a reset. 101 | let original_phases = phases.clone(); 102 | 103 | // Create an iterator over the phases in the correct order. 104 | let mut phases = phases.into_iter().peekable(); 105 | // Get the first phase. 106 | let Some(current_phase) = phases.next() else { 107 | panic!("Initialized Timer with no phases.") 108 | }; 109 | 110 | Self { 111 | original_phases, 112 | phases, 113 | current_phase, 114 | start_time: Utc::now(), 115 | last_check_time: None, 116 | } 117 | } 118 | 119 | /// Reset the timer to the beginning 120 | pub fn reset(&mut self) { 121 | self.start_time = Utc::now(); 122 | 123 | let phases = self.original_phases.clone(); 124 | 125 | // Create an iterator over the phases in the correct order. 126 | let mut phases = phases.into_iter().peekable(); 127 | // Get the first phase. 128 | let Some(current_phase) = phases.next() else { 129 | panic!("Initialized Timer with no phases.") 130 | }; 131 | 132 | self.phases = phases; 133 | self.current_phase = current_phase; 134 | self.start_time = Utc::now(); 135 | self.last_check_time = None; 136 | } 137 | 138 | /// Check if a phase should trigger right now. 139 | /// 140 | /// If so, the respective action will be returned. 141 | pub fn check(&mut self) -> Option<T> { 142 | let minutes_since_start = self.elapsed_minutes(); 143 | 144 | // Trigger the current phase. Do this even if we might switch to the next phase just 145 | // afterwards.
146 | if self.should_trigger_current_phase(minutes_since_start) { 147 | return Some(self.current_phase.action.clone()); 148 | } 149 | 150 | // Check if we should switch to the next phase. 151 | if let Some(next_phase) = self.phases.peek() 152 | && minutes_since_start >= next_phase.trigger_at_minute 153 | { 154 | self.current_phase = self.phases.next().unwrap(); 155 | } 156 | 157 | None 158 | } 159 | 160 | /// Check if a phase should trigger right now, with automatic sleep detection and reset. 161 | /// 162 | /// If so, the respective action will be returned. 163 | /// 164 | /// If more than 30 minutes have passed since the last check, the timer assumes the 165 | /// machine went to sleep and automatically resets the timer. 166 | pub fn check_with_sleep_detection(&mut self) -> Option<T> { 167 | let now = Utc::now(); 168 | 169 | // Check for sleep if we have a previous check time 170 | if let Some(last_check) = self.last_check_time { 171 | let time_since_check = now - last_check; 172 | if time_since_check > Duration::minutes(30) { 173 | info!( 174 | "Sleep detected ({}min gap), resetting timer", 175 | time_since_check.num_minutes() 176 | ); 177 | self.reset(); 178 | } 179 | } 180 | 181 | // Only set the last_check_time in here, as the `check()` call doesn't use this logic. 182 | self.last_check_time = Some(now); 183 | self.check() 184 | } 185 | 186 | /// Check if a phase should trigger at the given time. 187 | /// 188 | /// Returns true if the current phase should trigger right now. 189 | /// For recurring phases, calculates the most recent occurrence that hasn't been triggered yet. 190 | fn should_trigger_current_phase(&mut self, minutes_since_start: usize) -> bool { 191 | let phase = &mut self.current_phase; 192 | match &mut phase.phase_type { 193 | // One-time phases trigger once when their trigger time is reached 194 | PhaseType::OneTime { triggered } => { 195 | if !*triggered && minutes_since_start >= phase.trigger_at_minute { 196 | *triggered = true; 197 | true 198 | } else { 199 | false 200 | } 201 | } 202 | // Recurring phases trigger at their initial time and then at regular intervals 203 | PhaseType::Recurring { 204 | interval, 205 | last_action_minute, 206 | delayed, 207 | } => { 208 | // Calculate the next expected trigger time based on the last action 209 | let next_trigger_minute = if *last_action_minute == 0 { 210 | if *delayed { 211 | // First trigger for delayed phase - wait for interval after trigger time 212 | phase.trigger_at_minute + *interval 213 | } else { 214 | // First trigger - use the phase's trigger time 215 | phase.trigger_at_minute 216 | } 217 | } else { 218 | // Subsequent triggers - add interval to last action time 219 | *last_action_minute + *interval 220 | }; 221 | 222 | // Check if enough time has passed for the next trigger 223 | if minutes_since_start >= next_trigger_minute { 224 | *last_action_minute = next_trigger_minute; 225 | true 226 | } else { 227 | false 228 | } 229 | } 230 | } 231 | } 232 | 233 | /// Get the current elapsed minutes since the timer started 234 | pub fn elapsed_minutes(&self) -> usize { 235 | (Utc::now() - self.start_time).num_minutes() as usize 236 | } 237 | 238 | /// Test helper to simulate timer behavior at a specific time 239 | #[cfg(test)] 240 | fn action_at_time(&mut self, minutes: usize) -> Option<T> { 241 | // Temporarily modify start_time to simulate the specified elapsed time 242 | let original_start = self.start_time; 243 | self.start_time = Utc::now() - chrono::Duration::minutes(minutes as i64); 244 | 245 | let
result = self.check(); 246 | 247 | // Restore original start time 248 | self.start_time = original_start; 249 | result 250 | } 251 | 252 | /// Return the current phase 253 | #[cfg(test)] 254 | pub fn current_phase(&self) -> &Phase<T> { 255 | &self.current_phase 256 | } 257 | } 258 | 259 | #[cfg(test)] 260 | mod tests { 261 | 262 | use super::*; 263 | 264 | #[derive(Debug, Clone, PartialEq)] 265 | enum TestAction { 266 | Initial, 267 | Reminder, 268 | } 269 | 270 | #[test] 271 | fn creates_timer_with_sorted_phases() { 272 | let phases = vec![ 273 | Phase::one_time(90, TestAction::Initial), 274 | Phase::recurring(30, 10, TestAction::Reminder), 275 | ]; 276 | 277 | let timer = PhaseTimer::new(phases); 278 | 279 | // First phase should be the one with earliest trigger time 280 | assert_eq!(timer.current_phase.trigger_at_minute, 30); 281 | 282 | // Original phases should be sorted by trigger time 283 | assert_eq!(timer.original_phases[0].trigger_at_minute, 30); 284 | assert_eq!(timer.original_phases[1].trigger_at_minute, 90); 285 | } 286 | 287 | #[test] 288 | fn no_action_before_first_phase() { 289 | let phases = vec![Phase::one_time(90, TestAction::Initial)]; 290 | let mut timer = PhaseTimer::new(phases); 291 | 292 | // Should not trigger before the phase's designated trigger time 293 | let action = timer.action_at_time(45); 294 | assert_eq!(action, None); 295 | } 296 | 297 | #[test] 298 | fn one_time_phase_triggers_once() { 299 | let phases = vec![Phase::one_time(90, TestAction::Initial)]; 300 | let mut timer = PhaseTimer::new(phases); 301 | 302 | let action = timer.action_at_time(90); 303 | assert_eq!(action, Some(TestAction::Initial)); 304 | 305 | // Should not trigger again 306 | let action = timer.action_at_time(95); 307 | assert_eq!(action, None); 308 | } 309 | 310 | #[test] 311 | fn triggers_recurring_phase() { 312 | let phases = vec![Phase::recurring(90, 10, TestAction::Reminder)]; 313 | let mut timer = PhaseTimer::new(phases); 314 | 315 | // First occurrence 316 | let action = timer.action_at_time(90); 317 | assert_eq!(action, Some(TestAction::Reminder)); 318 | 319 | // Should not trigger again until interval passes 320 | let action = timer.action_at_time(95); 321 | assert_eq!(action, None); 322 | 323 | // Second occurrence 324 | let action = timer.action_at_time(100); 325 | assert_eq!(action, Some(TestAction::Reminder)); 326 | } 327 | 328 | #[test] 329 | fn resets_timer() { 330 | let phases = vec![Phase::one_time(90, TestAction::Initial)]; 331 | let mut timer = PhaseTimer::new(phases); 332 | 333 | // Trigger the phase 334 | timer.action_at_time(90); 335 | 336 | // Reset and verify it can trigger again 337 | let before_reset = Utc::now(); 338 | timer.reset(); 339 | let after_reset = Utc::now(); 340 | 341 | // After reset, the current phase should be the first one again 342 | assert_eq!(timer.current_phase.trigger_at_minute, 90); 343 | assert!(timer.start_time >= before_reset && timer.start_time <= after_reset); 344 | 345 | // Should trigger again after reset 346 | let action = timer.action_at_time(90); 347 | assert_eq!(action, Some(TestAction::Initial)); 348 | } 349 | 350 | #[test] 351 | fn detects_sleep_and_resets_timer() { 352 | let phases = vec![Phase::recurring(10, 10, TestAction::Reminder)]; 353 | let mut timer = PhaseTimer::new(phases); 354 | 355 | // First trigger at 10 minutes 356 | let action = timer.action_at_time(10); 357 | assert_eq!(action, Some(TestAction::Reminder)); 358 | 359 | // Simulate normal check at 15 minutes (no action expected) 360 | timer.last_check_time =
Some(Utc::now() - chrono::Duration::minutes(15)); 361 | let action = timer.check_with_sleep_detection(); 362 | assert_eq!(action, None); 363 | 364 | // Simulate sleep: set last_check_time to 35 minutes ago 365 | timer.last_check_time = Some(Utc::now() - chrono::Duration::minutes(35)); 366 | 367 | // This should detect sleep and reset the timer 368 | let action = timer.check_with_sleep_detection(); 369 | 370 | // After reset, we should be at the beginning of the timer 371 | // No immediate action since we're starting fresh 372 | assert_eq!(action, None); 373 | 374 | // Verify timer was actually reset by checking the start time is recent 375 | let minutes_since_start = timer.elapsed_minutes(); 376 | assert!( 377 | minutes_since_start < 2, 378 | "Timer should have been reset, but elapsed time is {}", 379 | minutes_since_start 380 | ); 381 | 382 | // Verify the timer works normally after reset 383 | let action = timer.action_at_time(10); 384 | assert_eq!(action, Some(TestAction::Reminder)); 385 | } 386 | 387 | #[test] 388 | fn delayed_recurring_phase() { 389 | // Test the dehn-polizei scenario: one-time at 90min, delayed recurring starts at 90min but 390 | // first triggers at 100min 391 | let phases = vec![ 392 | Phase::one_time(90, TestAction::Initial), 393 | Phase::recurring_delayed(90, 10, TestAction::Reminder), 394 | ]; 395 | let mut timer = PhaseTimer::new(phases); 396 | 397 | // No action before first phase 398 | assert_eq!(timer.action_at_time(89), None); 399 | 400 | // One-time phase triggers at 90 minutes 401 | assert_eq!(timer.action_at_time(90), Some(TestAction::Initial)); 402 | 403 | // No action between phases - delayed recurring waits for interval 404 | assert_eq!(timer.action_at_time(95), None); 405 | assert!( 406 | matches!( 407 | timer.current_phase().phase_type, 408 | PhaseType::Recurring { .. } 409 | ), 410 | "We should've entered the recurring phase" 411 | ); 412 | 413 | // Delayed recurring phase first triggers at 100 minutes (90 + 10 interval) 414 | assert_eq!(timer.action_at_time(100), Some(TestAction::Reminder)); 415 | 416 | // No action before next interval 417 | assert_eq!(timer.action_at_time(105), None); 418 | 419 | // Next recurring trigger at 110 minutes 420 | assert_eq!(timer.action_at_time(110), Some(TestAction::Reminder)); 421 | } 422 | } 423 | --------------------------------------------------------------------------------
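A minimal usage sketch for the `PhaseTimer` from `src/timer.rs` above, mirroring the pattern its doc comment describes (stay quiet for 90 minutes, send one initial notification, then remind every 10 minutes until reset). The `Action` enum, the printed messages, and the 60-second polling loop are illustrative assumptions and not part of the repository; only `Phase`, `PhaseTimer`, `check_with_sleep_detection`, and `elapsed_minutes` come from the crate itself.

```
use script_utils::timer::{Phase, PhaseTimer};

// Hypothetical payload type; any `Clone` type works as a phase action.
#[derive(Debug, Clone)]
enum Action {
    Initial,
    Reminder,
}

fn main() {
    // Quiet for 90 minutes, one initial notification at the 90 minute mark,
    // then a reminder every 10 minutes until `reset()` is called.
    let mut timer = PhaseTimer::new(vec![
        Phase::one_time(90, Action::Initial),
        Phase::recurring_delayed(90, 10, Action::Reminder),
    ]);

    loop {
        // Returns the active phase's action when it is due and resets itself
        // if a gap of more than 30 minutes (e.g. suspend) is detected.
        if let Some(action) = timer.check_with_sleep_detection() {
            match action {
                Action::Initial => println!("You have been at it for 90 minutes."),
                Action::Reminder => {
                    println!("Still going after {} minutes.", timer.elapsed_minutes())
                }
            }
        }
        std::thread::sleep(std::time::Duration::from_secs(60));
    }
}
```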