├── .gitignore
├── LICENSE
├── README.md
├── git-change-author
├── git-find-dirs-deleted-files
├── git-find-dirs-many-files
├── git-find-dirs-unwanted
├── git-find-ignored-files
├── git-find-large-files
├── git-find-lfs-extensions
├── git-find-utf-16-encoded-files
├── git-normalize-pathnames
├── git-pack-benchmark
├── git-purge-files
└── tests
    └── t-git-normalize-pathnames.sh

/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode/
2 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2016 Lars Schneider
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Git Repo Analysis Tools
2 | 
3 | Git can become slow if a repository exceeds certain thresholds ([read this for details](http://larsxschneider.github.io/2016/09/21/large-git-repos)). Use the scripts explained below to identify possible culprits in a repository. The scripts have been tested on macOS but they should run on Linux as is.
4 | 
5 | _Hint:_ The scripts can run for a long time and output a lot of lines. Pipe their output to a file (`./script > myfile`) for further processing.
6 | 
7 | ## Large by File Size
8 | Use the [git-find-large-files](git-find-large-files) script to identify large files in your Git repository that you could move to [Git LFS](https://git-lfs.github.com/) (e.g. using [git-lfs-migrate](https://github.com/git-lfs/git-lfs/blob/master/docs/man/git-lfs-migrate.1.ronn)).
9 | 
10 | Use the [git-find-lfs-extensions](git-find-lfs-extensions) script to identify certain file types that you could move to [Git LFS](https://git-lfs.github.com/).
11 | 
12 | ## Large by File Count
13 | Use the [git-find-dirs-many-files](git-find-dirs-many-files) and [git-find-dirs-unwanted](git-find-dirs-unwanted) scripts to identify directories with a large number of files. These might indicate 3rd party components that could be extracted.
14 | 
15 | Use the [git-find-dirs-deleted-files](git-find-dirs-deleted-files) script to identify directories that have been deleted and used to contain a lot of files. If you purge all files under these directories from your history then you might be able to significantly reduce the overall size of your repository.
16 | 
17 | 
18 | 
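The README describes the intended workflow only in prose, so here is a minimal, hedged sketch of how the analysis and cleanup steps can be combined. It assumes the scripts are on your `PATH`; `old-vendor-dir` is a made-up directory name standing in for whatever the analysis reports, and the argument-less call to `git-find-dirs-deleted-files` is an assumption, since only its name appears in the file tree above.

```bash
# Hedged sketch of the workflow described in the README above.
cd my-repo                                      # run from the repository root

# 1. Analysis: slow, so capture the output in files as the hint suggests.
git-find-large-files 1000 > large-files.txt     # threshold in KB
git-find-dirs-deleted-files > deleted-dirs.txt  # assumed: takes no arguments

# 2. Cleanup: rewrite history to drop a deleted 3rd-party directory.
git-purge-files "^/old-vendor-dir/"
```

Both `git-purge-files` and `git-normalize-pathnames` rewrite history in place, so running them on a fresh clone is the safer choice.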
--------------------------------------------------------------------------------
/git-change-author:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Fix an invalid committer/author in all commits of your repository.
4 | #
5 | # Usage:
6 | #    git-change-author
7 | #
8 | 
9 | filter=$(cat <&2 echo "error: unknown option “$1”")
33 |         print_help
34 |         exit 1
35 |     fi
36 |     ;;
37 | esac
38 | 
39 | # Find all ignored files
40 | files=$(git ls-files --ignored --exclude-standard)
41 | 
42 | # Stop if no ignored files were found
43 | if [[ -z $files ]]
44 | then
45 |     (>&2 echo "info: no ignored files in working tree or index")
46 |     exit 0
47 | fi
48 | 
49 | # Compute the file sizes of all these files
50 | file_sizes=$(echo "$files" | tr '\n' '\0' | xargs -0 du -sh)
51 | 
52 | # Determine which .gitignore rules cause these files to be ignored
53 | gitignore_origins=$(echo "$files" | git check-ignore --verbose --stdin --no-index)
54 | 
55 | # Merge the two lists into one
56 | command="join -1 2 -2 2 -t $'\t' -o 1.1,1.2,2.1 <(echo \"$file_sizes\") <(echo \"$gitignore_origins\")"
57 | 
58 | if [[ $1 =~ ^-s|--sort-by-size$ ]]
59 | then
60 |     command="$command | sort -h"
61 | fi
62 | 
63 | eval "$command"
64 | 
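A hedged usage sketch for `git-find-ignored-files`, based only on the option handling visible above: `-s`/`--sort-by-size` appends `sort -h`, so the largest ignored files end up at the bottom of the list, and the output columns (size, path, matching `.gitignore` rule) follow from the `join` of the `du -sh` and `git check-ignore --verbose` lists.

```bash
# Assumed invocation; the script's own usage header is not shown above.
./git-find-ignored-files --sort-by-size > ignored.txt
tail -n 20 ignored.txt    # roughly the 20 largest ignored files
```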
--------------------------------------------------------------------------------
/git-find-large-files:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Print the largest files in a Git repository. The script must be called
4 | # from the root of the Git repository. You can pass a threshold to print
5 | # only files greater than a certain size (compressed size in the Git database,
6 | # default is 500 KB).
7 | #
8 | # Files that have a large compressed size should usually be stored in
9 | # Git LFS [2].
10 | #
11 | # Based on a script by Antony Stubbs [1] and improved with ideas from Peff.
12 | #
13 | # [1] http://stubbisms.wordpress.com/2009/07/10/git-script-to-show-largest-pack-objects-and-trim-your-waist-line/
14 | # [2] https://git-lfs.github.com/
15 | #
16 | # Usage:
17 | #    git-find-large-files [size threshold in KB]
18 | #
19 | 
20 | if [ -z "$1" ]; then
21 |     MIN_SIZE_IN_KB=500
22 | else
23 |     MIN_SIZE_IN_KB=$1
24 | fi
25 | 
26 | # Use "look" if it is available, otherwise use "grep" (e.g. on Windows)
27 | if command -v look >/dev/null 2>&1; then
28 |     # On Debian the "-b" option is available and required to make "look"
29 |     # perform a binary search (see https://unix.stackexchange.com/a/499312/275508).
30 |     if look 2>&1 | grep -q .-b; then
31 |         search="look -b"
32 |     else
33 |         search=look
34 |     fi
35 | else
36 |     search=grep
37 | fi
38 | 
39 | # set the internal field separator to line break,
40 | # so that we can iterate easily over the cat-file output
41 | IFS=$'\n';
42 | 
43 | # list all objects including their size, sort by compressed size
44 | OBJECTS=$(
45 |     git cat-file \
46 |         --batch-all-objects \
47 |         --batch-check='%(objectsize:disk) %(objectname)' \
48 |     | sort -nr
49 | )
50 | 
51 | TMP_DIR=$(mktemp -d "${TMPDIR:-/tmp}/git-find-large-files.XXXXXX") || exit
52 | trap "rm -rf '$TMP_DIR'" EXIT
53 | 
54 | git rev-list --all --objects | sort > "$TMP_DIR/objects"
55 | git rev-list --all --objects --max-count=1 | sort > "$TMP_DIR/objects.1"
56 | 
57 | for OBJ in $OBJECTS; do
58 |     # extract the compressed size in kilobytes
59 |     COMPRESSED_SIZE=$(($(echo $OBJ | cut -f 1 -d ' ')/1024))
60 | 
61 |     if [ $COMPRESSED_SIZE -le $MIN_SIZE_IN_KB ]; then
62 |         break
63 |     fi
64 | 
65 |     # extract the SHA
66 |     SHA=$(echo $OBJ | cut -f 2 -d ' ')
67 | 
68 |     # find the object's location in the repository tree
69 |     LOCATION=$($search $SHA "$TMP_DIR/objects" | sed "s/$SHA //")
70 |     if $search $SHA "$TMP_DIR/objects.1" >/dev/null; then
71 |         # Object is in the head revision
72 |         HEAD="Present"
73 |     elif [ -e $LOCATION ]; then
74 |         # Object's path is in the head revision
75 |         HEAD="Changed"
76 |     else
77 |         # Neither the object nor its path is in the head revision
78 |         HEAD="Deleted"
79 |     fi
80 | 
81 |     echo "$COMPRESSED_SIZE,$HEAD,$LOCATION" >> "$TMP_DIR/output"
82 | done
83 | 
84 | if [ -f "$TMP_DIR/output" ]; then
85 |     column -t -s ',' < "$TMP_DIR/output"
86 | fi
87 | 
88 | rm -rf "$TMP_DIR"
89 | exit 0
90 | 
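The expensive part of `git-find-large-files` is classifying every object against HEAD; the size ranking itself is the single plumbing call used at the top of the script. For a quick look at the biggest objects without the path lookup, that call can be run on its own:

```bash
# Ten largest objects by compressed on-disk size (in bytes), largest first.
git cat-file --batch-all-objects \
    --batch-check='%(objectsize:disk) %(objectname)' |
    sort -nr | head -n 10
```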
--------------------------------------------------------------------------------
/git-find-lfs-extensions:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | # Identify file extensions in a directory tree that could be tracked
4 | # by Git LFS in a repository migration to Git.
5 | #
6 | # Columns explanation:
7 | #   Type      = "binary" or "text".
8 | #   Extension = File extension.
9 | #   LShare    = Percentage of files with the extension that are larger than
10 | #               the threshold.
11 | #   LCount    = Number of files with the extension that are larger than the
12 | #               threshold.
13 | #   Count     = Number of files with the extension in total.
14 | #   Size      = Size of all files with the extension in MB.
15 | #   Min       = Size of the smallest file with the extension in MB.
16 | #   Max       = Size of the largest file with the extension in MB.
17 | #
18 | # Attention: this script only processes a directory tree or the Git HEAD
19 | # revision. Git history is not taken into account.
20 | #
21 | # Usage:
22 | #    git-find-lfs-extensions [size threshold in KB]
23 | #
24 | 
25 | import os
26 | import sys
27 | 
28 | # Threshold that defines a large file
29 | if len(sys.argv) > 1:
30 |     THRESHOLD_IN_MB = float(sys.argv[1]) / 1024
31 | else:
32 |     THRESHOLD_IN_MB = 0.5
33 | 
34 | CWD = os.getcwd()
35 | CHUNKSIZE = 1024
36 | MAX_TYPE_LEN = len("Type")
37 | MAX_EXT_LEN = len("Extension")
38 | result = {}
39 | 
40 | def is_binary(filename):
41 |     """Return true if the given filename is binary.
42 |     @raise EnvironmentError: if the file does not exist or cannot be accessed.
43 |     @attention: found @ http://bytes.com/topic/python/answers/21222-determine-file-type-binary-text on 6/08/2010
44 |     @author: Trent Mick
45 |     @author: Jorge Orpinel """
46 |     fin = open(filename, 'rb')
47 |     try:
48 |         while 1:
49 |             chunk = fin.read(CHUNKSIZE)
50 |             if b'\0' in chunk: # found null byte
51 |                 return True
52 |             if len(chunk) < CHUNKSIZE:
53 |                 break # done
54 |     finally:
55 |         fin.close()
56 |     return False
57 | 
58 | def add_file(ext, type, size_mb):
59 |     ext = ext.lower()
60 |     global MAX_EXT_LEN
61 |     MAX_EXT_LEN = max(MAX_EXT_LEN, len(ext))
62 |     global MAX_TYPE_LEN
63 |     MAX_TYPE_LEN = max(MAX_TYPE_LEN, len(type))
64 |     if ext not in result:
65 |         result[ext] = {
66 |             'ext' : ext,
67 |             'type' : type,
68 |             'count_large' : 0,
69 |             'size_large' : 0,
70 |             'count_all' : 0,
71 |             'size_all' : 0
72 |         }
73 |     result[ext]['count_all'] = result[ext]['count_all'] + 1
74 |     result[ext]['size_all'] = result[ext]['size_all'] + size_mb
75 |     if size_mb > THRESHOLD_IN_MB:
76 |         result[ext]['count_large'] = result[ext]['count_large'] + 1
77 |         result[ext]['size_large'] = result[ext]['size_large'] + size_mb
78 |     if 'max' not in result[ext] or size_mb > result[ext]['max']:
79 |         result[ext]['max'] = size_mb
80 |     if 'min' not in result[ext] or size_mb < result[ext]['min']:
81 |         result[ext]['min'] = size_mb
82 | 
83 | def print_line(type, ext, share_large, count_large, count_all, size_all, min, max):
84 |     print('{}{}{}{}{}{}{}{}'.format(
85 |         type.ljust(3+MAX_TYPE_LEN),
86 |         ext.ljust(3+MAX_EXT_LEN),
87 |         str(share_large).rjust(10),
88 |         str(count_large).rjust(10),
89 |         str(count_all).rjust(10),
90 |         str(size_all).rjust(10),
91 |         str(min).rjust(10),
92 |         str(max).rjust(10)
93 |     ))
94 | 
95 | for root, dirs, files in os.walk(CWD):
96 |     for basename in files:
97 |         filename = os.path.join(root, basename)
98 |         try:
99 |             size_mb = float(os.path.getsize(filename)) / 1024 / 1024
100 |             if not filename.startswith(os.path.join(CWD, '.git')) and size_mb > 0:
101 |                 if is_binary(filename):
102 |                     file_type = "binary"
103 |                 else:
104 |                     file_type = "text"
105 |                 ext = os.path.basename(filename)
106 |                 add_file('*', 'all', size_mb)
107 |                 if ext.find('.') == -1:
108 |                     # files w/o extension
109 |                     add_file(ext, file_type + " w/o ext", size_mb)
110 |                 else:
111 |                     while ext.find('.') >= 0:
112 |                         ext = ext[ext.find('.')+1:]
113 |                         if ext.find('.') <= 0:
114 |                             add_file(ext, file_type, size_mb)
115 | 
116 |         except Exception as e:
117 |             print(e)
118 | 
119 | print('')
120 | print_line('Type', 'Extension', 'LShare', 'LCount', 'Count', 'Size', 'Min', 'Max')
121 | print_line('-------', '---------', '-------', '-------', '-------', '-------', '-------', '-------')
122 | 
123 | for ext in sorted(result, key=lambda x: (result[x]['type'], -result[x]['size_large'])):
124 |     if result[ext]['count_large'] > 0:
125 |         large_share = 100*result[ext]['count_large']/result[ext]['count_all']
126 |         print_line(
127 |             result[ext]['type'],
128 |             ext,
129 |             str(round(large_share)) + ' %',
130 |             result[ext]['count_large'],
131 |             result[ext]['count_all'],
132 |             int(result[ext]['size_all']),
133 |             int(result[ext]['min']),
134 |             int(result[ext]['max'])
135 |         )
136 | 
137 | print("\nAdd to .gitattributes:\n")
138 | for ext in sorted(result, key=lambda x: (result[x]['type'], x)):
139 |     if len(ext) > 0 and result[ext]['type'] == "binary" and result[ext]['count_large'] > 0:
140 |         print('*.{} filter=lfs diff=lfs merge=lfs -text'.format(
141 |             "".join("[" + c.upper() + c.lower() + "]" if (('a' <= c <= 'z') or ('A' <= c <= 'Z')) else c for c in ext)
142 |         ))
143 | 
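Once the suggested `.gitattributes` patterns look right, the usual next step is to rewrite existing history so the matching files actually move into LFS. A hedged sketch using the stock `git lfs migrate` command; `*.psd` is only a placeholder for whatever the table reports, and `--everything` rewrites all local refs:

```bash
# Assumes Git LFS is installed; adjust the pattern to your own findings.
git lfs migrate import --include="*.psd" --everything
```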
--------------------------------------------------------------------------------
/git-find-utf-16-encoded-files:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Find and print files that are encoded with UTF-16
4 | #
5 | # Usage:
6 | #    git-find-utf-16-encoded-files
7 | #
8 | 
9 | find . -type f -not -path "./.git/*" -exec file {} \; |
10 |     grep --ignore-case utf-16
11 | 
--------------------------------------------------------------------------------
/git-normalize-pathnames:
--------------------------------------------------------------------------------
1 | #!/usr/bin/perl
2 | #
3 | # Normalize pathname casing in Git repositories. This makes it easier for
4 | # `git log` to visualize the history of a file. E.g. if a file was renamed
5 | # from "/foo/BAR" to "/foo/bar" then Git (and GitHub!) would not show the
6 | # entire history of that file by default. This script fixes this!
7 | #
8 | # TODO: This script detects only pathnames that have changed their casing!
9 | #       It does not yet detect pathnames whose components differ only in
10 | #       casing and live next to each other. E.g.:
11 | #           /foo/bar1
12 | #           /Foo/bar2
13 | #
14 | # Usage:
15 | #    git-normalize-pathnames
16 | #
17 | 
18 | use strict;
19 | use warnings;
20 | 
21 | print "Scanning repo...\n";
22 | 
23 | # Query all pathnames ever used in the Git repo in new-to-old order.
24 | # Also disable all rename detection.
25 | my @pathnames
26 |     = `git -c diff.rename=0 log --branches --name-only --pretty=format:`;
27 | 
28 | # Generate list of case-sensitive unique pathnames
29 | my %seen_cs;
30 | my @unique_cs;
31 | for my $p (@pathnames) {
32 |     next if $seen_cs{$p}++;
33 |     push( @unique_cs, $p );
34 | }
35 | 
36 | # Generate list of case-insensitive unique pathnames
37 | my %seen_ci;
38 | my @unique_ci;
39 | for my $p (@unique_cs) {
40 |     next if $seen_ci{ lc($p) }++;
41 |     push( @unique_ci, $p );
42 | }
43 | 
44 | # Generate list of pathnames that have multiple case variants
45 | my @dups;
46 | for my $p (@unique_ci) {
47 |     next if $seen_ci{ lc($p) } < 2;
48 |     push( @dups, $p );
49 | }
50 | 
51 | if ( scalar @dups == 0 ) {
52 |     print "\nNo pathname issues detected.\n";
53 |     exit 0;
54 | }
55 | 
56 | print "\nPathname issues detected:\n";
57 | for my $p (@dups) {
58 |     print " " . $p;
59 | }
60 | print "\nRewriting history...\n";
61 | 
62 | # TODO: check file touched twice?
63 | 
64 | my %seen;
65 | my $skip = 0;
66 | open( my $pipe_in, "git fast-export --progress=100 --no-data HEAD |" ) or die $!;
67 | open( my $pipe_out, "| git fast-import --force --quiet" ) or die $!;
68 | while ( my $row = <$pipe_in> ) {
69 |     if ( length($row) > $skip ) {
70 |         my $s = $skip;
71 |         $skip = 0;
72 |         my $cmd = substr( $row, $s );
73 | 
74 |         # skip data blocks
75 |         if ( $cmd =~ /^data ([0-9]+)$/ ) {
76 |             $skip = $1;
77 |         }
78 |         # ignore empty lines
79 |         elsif ( $cmd =~ /^$/ ) { }
80 |         # ignore commands
81 |         elsif ( $cmd =~ /^(reset|blob|checkpoint|progress|feature|option|done|from|mark|author)/ ) { }
82 |         elsif ( $cmd =~ /^(commit|tag|merge)/ ) {
83 |             %seen = ();
84 |         }
85 |         elsif ( $cmd =~ /^M [0-9]{6} [0-9a-f]{40} .+/ ) {
86 |             for my $p (@dups) {
87 |                 if ( $cmd =~ s/\Q$p\E/$p/i ) {
88 |                     # print "M" . $p . "\n";
89 |                     $seen{ $p }++;
90 |                     $row = substr( $row, 0, $s ) . $cmd;
91 |                     last;
92 |                 }
93 |             }
94 |         }
95 |         # rewrite path names
96 |         elsif ( $cmd =~ /^D .+/ ) {
97 |             for my $p (@dups) {
98 |                 if ( $cmd =~ s/\Q$p\E/$p/i ) {
99 |                     # print "D" . $p . "\n";
100 |                     if ( $seen{ $p } ) {
101 |                         $cmd = "";
102 |                     }
103 |                     $row = substr( $row, 0, $s ) . $cmd;
104 |                     last;
105 |                 }
106 |             }
107 |         }
108 |         else {
109 |             die "Unknown command:\n" . $cmd . "\nIn row:\n" . $row;
110 |         }
111 |     }
112 |     elsif ( $skip > 0 ) {
113 |         $skip -= length($row);
114 |     }
115 |     else {
116 |         die "Skipping data block failed: " . $skip;
117 |     }
118 | 
119 |     print {$pipe_out} $row;
120 | }
121 | 
122 | print "Done!\n";
123 | 
--------------------------------------------------------------------------------
/git-pack-benchmark:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Benchmark different `git repack` delta chain lengths for a given repo,
4 | # based on Peff's analysis discussed here:
5 | # https://github.com/git/git/commit/07e7dbf0db05a550a92a6a5a8977ac47efa7b794
6 | #
7 | # Usage:
8 | #    git-pack-benchmark <temp dir> <repo URL>
9 | #
10 | 
11 | TEMP_DIR=$1
12 | REPO_URL=$2
13 | 
14 | function best-of {
15 |     { for i in 1 2 3; do time $1 >/dev/null; done; } 2>&1 |
16 |         grep real |
17 |         cut -c 6- |
18 |         sort -n |
19 |         head -n1
20 | }
21 | 
22 | function run {
23 |     DEPTH=$1
24 |     REPO_DIR=depth-$DEPTH
25 | 
26 |     printf "\n### TEST RUN ###\n"
27 |     printf "Start: $(date)\n";
28 |     printf "Depth: $DEPTH\n";
29 | 
30 |     if test -d "$REPO_DIR"; then
31 |         echo "$(tput setaf 1)$REPO_DIR already exists - reusing! $(tput sgr0)"
32 |     else
33 |         cp -r base "$REPO_DIR"
34 |         git --git-dir="$REPO_DIR" repack -a -d -f --depth=$DEPTH --window=250 >/dev/null 2>&1
35 |     fi
36 | 
37 |     pushd "$REPO_DIR" >/dev/null
38 |     printf "Size: "; du -sh .
39 |     printf "rev-list: "; best-of "git rev-list --objects --all"
40 |     printf "log -Sfoo: "; best-of "git log -Sfoo"
41 |     printf "\n"
42 |     popd >/dev/null
43 | }
44 | 
45 | mkdir -p "$TEMP_DIR"
46 | pushd "$TEMP_DIR"
47 | printf "\n#### REPO PACK BENCHMARK ###\n"
48 | git --version
49 | printf "System: "; uname;
50 | printf "Repo URL: $REPO_URL\n"
51 | 
52 | if test -d base; then
53 |     printf "$(tput setaf 1)Repo already exists - reusing! Delete $TEMP_DIR for a clean run!$(tput sgr0)\n"
54 | else
55 |     git clone --bare $REPO_URL base
56 | fi
57 | run 250
58 | run 100
59 | run 50
60 | run 10
61 | popd
62 | 
--------------------------------------------------------------------------------
/git-purge-files:
--------------------------------------------------------------------------------
1 | #!/usr/bin/perl
2 | #
3 | # Purge files from Git repositories.
4 | #
5 | # Attention:
6 | #   You want to run this script on a case-sensitive file system (e.g.
7 | #   ext4 on Linux). Otherwise the resulting Git repository will not
8 | #   contain changes that modify the casing of file paths.
9 | #
10 | # Usage:
11 | #    git-purge-files [path-regex1] [path-regex2] ...
12 | #
13 | # Examples:
14 | #   Remove the file "test.bin" from all directories:
15 | #       git-purge-files "/test.bin$"
16 | #
17 | #   Remove all "*.bin" files from all directories:
18 | #       git-purge-files "\.bin$"
19 | #
20 | #   Remove all files in the "/foo" directory:
21 | #       git-purge-files "^/foo/"
22 | #
23 | 
24 | use strict;
25 | use warnings;
26 | 
27 | my $path_regex = join( "|", @ARGV );
28 | 
29 | open( my $pipe_in, "git fast-export --progress=10000 --no-data --all --signed-tags=warn-strip --tag-of-filtered-object=rewrite |" ) or die $!;
30 | open( my $pipe_out, "| git fast-import --force --quiet" ) or die $!;
31 | 
32 | LOOP: while ( my $cmd = <$pipe_in> ) {
33 |     my $data = "";
34 |     if ( $cmd =~ /^data ([0-9]+)$/ ) {
35 |         # skip data blocks
36 |         my $skip_bytes = $1;
37 |         read( $pipe_in, $data, $skip_bytes );
38 |     }
39 |     elsif ( $cmd =~ /^M [0-9]{6} [0-9a-f]{40} (.+)$/ ) {
40 |         my $pathname = $1;
41 |         next LOOP if ( "/" . $pathname ) =~ /$path_regex/o;
42 |     }
43 |     print {$pipe_out} $cmd . $data;
44 | }
45 | 
--------------------------------------------------------------------------------
/tests/t-git-normalize-pathnames.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | 
3 | function die {
4 |     echo "$1"
5 |     exit 1
6 | }
7 | 
8 | CURRENT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")">/dev/null && pwd)"
9 | TEST_DIR="$CURRENT_DIR/tmp"
10 | 
11 | rm -rf "$TEST_DIR"
12 | mkdir "$TEST_DIR"
13 | pushd "$TEST_DIR"
14 | 
15 | git init . >/dev/null
16 | 
17 | echo "a" > foo
18 | git add foo >/dev/null
19 | git commit -m "add file" >/dev/null
20 | 
21 | rm foo
22 | git add foo
23 | echo "b" > Foo
24 | git add Foo >/dev/null
25 | git commit -m "change case of file" >/dev/null
26 | 
27 | ../../git-normalize-pathnames
28 | 
29 | rm Foo
30 | git reset --hard HEAD
31 | 
32 | [ -f Foo ] || die "FAIL"
33 | [ $(git log --numstat -- Foo | grep Foo | wc -l) -eq 2 ] || die "FAIL"
34 | popd
35 | 
--------------------------------------------------------------------------------
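Two follow-ups that none of the scripts perform themselves. First, after `git-normalize-pathnames` a plain path-limited `git log` should show a file's complete history, including commits made under the old casing (the same property the test above asserts). Second, both rewrite scripts feed `git fast-import --force`, which leaves the old objects in the object database, so the repository only shrinks once those are expired. A hedged sketch with standard Git commands; `foo/bar` is a placeholder path, and the cleanup is destructive, so keep a backup clone:

```bash
# 1. Verify the rewrite: the full history should appear without --follow.
git log --oneline -- foo/bar

# 2. Reclaim disk space: drop references to pre-rewrite commits and repack.
git reflog expire --expire=now --all
git gc --prune=now --aggressive
git count-objects -v -H    # compare with the size before the rewrite
```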