├── .gitignore
├── LICENSE.md
├── README.md
├── add
├── common
│   └── libc.sh
├── download
├── dump
├── find
├── get
├── identify
├── libs
│   └── .gitignore
└── searchengine
    ├── .dockerignore
    ├── .gitignore
    ├── DEVELOPMENT.md
    ├── Dockerfile
    ├── README.md
    ├── api.yml
    ├── app.py
    ├── config.py
    ├── docker-compose.yml
    ├── frontend
    │   ├── .gitignore
    │   ├── README.md
    │   ├── package-lock.json
    │   ├── package.json
    │   ├── public
    │   │   ├── index.html
    │   │   ├── manifest.json
    │   │   └── robots.txt
    │   └── src
    │       ├── App.css
    │       ├── App.js
    │       ├── App.test.js
    │       ├── index.css
    │       ├── index.js
    │       ├── serviceWorker.js
    │       └── setupTests.js
    ├── index.py
    ├── nginx.conf
    ├── requirements.txt
    ├── update.sh
    └── uwsgi.ini

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /db/
2 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright 2020 Niklas Baumstark
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Web interface
2 |
3 | libc-database now has a web service and frontend. Visit https://libc.rip/ to
4 | try it out! Read https://github.com/niklasb/libc-database/tree/master/searchengine
5 | if you are interested in the API.
6 |
7 |
8 | ## Building a libc offset database
9 |
10 | If you're getting errors, please check the "Requirements" section below.
11 |
12 | Fetch the desired libc categories and extract the symbol offsets.
13 | It will not download anything twice, so you can also use it to update your
14 | database:
15 |
16 |     $ ./get  # List categories
17 |     $ ./get ubuntu debian  # Download Ubuntu's and Debian's libc, old default behavior
18 |     $ ./get all  # Download all categories. Can take a while!
19 |
20 | You can also add a custom libc to your database.
21 |
22 |     $ ./add /usr/lib/libc-2.21.so
23 |
24 | Find all the libcs in the database that have the given names at the given
25 | addresses. Only the last 12 bits are checked, because randomization usually
26 | works at page-size granularity.
27 |
28 |     $ ./find printf 260 puts f30
29 |     archive-glibc (libc6_2.19-10ubuntu2_i386)
30 |
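Why only the last 12 bits? Libraries are mapped at page (0x1000) granularity, so ASLR leaves an address unchanged modulo 0x1000. Extracting the searchable part of a leak is a one-liner (the leaked address below is made up):

    $ leak=0x7f0123456f30
    $ printf '%03x\n' $(( leak & 0xfff ))
    f30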
31 | Find a libc from the leaked return address into `__libc_start_main`.
32 |
33 |     $ ./find __libc_start_main_ret a83
34 |     ubuntu-trusty-i386-libc6 (libc6_2.19-0ubuntu6.6_i386)
35 |     archive-eglibc (libc6_2.19-0ubuntu6_i386)
36 |     ubuntu-utopic-i386-libc6 (libc6_2.19-10ubuntu2.3_i386)
37 |     archive-glibc (libc6_2.19-10ubuntu2_i386)
38 |     archive-glibc (libc6_2.19-15ubuntu2_i386)
39 |
40 | Dump some useful offsets, given a libc ID. You can also provide your own names
41 | to dump.
42 |
43 |     $ ./dump libc6_2.19-0ubuntu6.6_i386
44 |     offset___libc_start_main_ret = 0x19a83
45 |     offset_system = 0x00040190
46 |     offset_dup2 = 0x000db590
47 |     offset_recv = 0x000ed2d0
48 |     offset_str_bin_sh = 0x160a24
49 |
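These offsets are relative to the library base, so rebasing a leak is plain arithmetic; for example, with the values dumped above (the leaked address is made up):

    $ leak=0x7f0123441a83            # leaked __libc_start_main_ret
    $ base=$(( leak - 0x19a83 ))
    $ printf 'system @ %#x\n' $(( base + 0x00040190 ))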
50 | Check whether a library is already in the database.
51 |
52 |     $ ./identify /usr/lib/libc.so.6
53 |     local-f706181f06104ef6c7008c066290ea47aa4a82c5
54 |
55 | Or find a libc using a hash (currently BuildID, MD5, SHA1 and SHA256 are
56 | supported):
57 |
58 |     $ ./identify bid=ebeabf5f7039f53748e996fc976b4da2d486a626
59 |     libc6_2.17-93ubuntu4_i386
60 |     $ ./identify md5=af7c40da33c685d67cdb166bd6ab7ac0
61 |     libc6_2.17-93ubuntu4_i386
62 |     $ ./identify sha1=9054f5cb7969056b6816b1e2572f2506370940c4
63 |     libc6_2.17-93ubuntu4_i386
64 |     $ ./identify sha256=8dc102c06c50512d1e5142ce93a6faf4ec8b6f5d9e33d2e1b45311aef683d9b2
65 |     libc6_2.17-93ubuntu4_i386
66 |
67 | Download the whole set of libraries corresponding to a libc ID.
68 |
69 |     $ ./download libc6_2.23-0ubuntu10_amd64
70 |     Getting libc6_2.23-0ubuntu10_amd64
71 |      -> Location: http://security.ubuntu.com/ubuntu/pool/main/g/glibc/libc6_2.23-0ubuntu10_amd64.deb
72 |      -> Downloading package
73 |      -> Extracting package
74 |      -> Package saved to libs/libc6_2.23-0ubuntu10_amd64
75 |     $ ls libs/libc6_2.23-0ubuntu10_amd64
76 |     ld-2.23.so ... libc.so.6 ... libpthread.so.0 ...
77 |
78 |
79 | ## Requirements
80 |
81 | ### General
82 |
83 | To run these scripts, you will need the following commands on your PATH:
84 |
85 | * readelf
86 | * objdump
87 | * strings
88 | * perl
89 | * find
90 | * grep
91 | * md5sum
92 | * sha1sum
93 | * sha256sum
94 | * file
95 |
96 |
97 | ### Debian-based (Ubuntu, Debian, Kali Linux, ParrotSec)
98 |
99 | * mktemp
100 | * perl
101 | * wget
102 | * ar
103 | * tar
104 | * grep
105 | * zstd
106 |
107 |
108 | ### RPM-based (category 'rpm')
109 |
110 | * mktemp
111 | * perl
112 | * wget
113 | * rpm2cpio
114 | * cpio
115 | * grep
116 |
117 |
118 | ### CentOS-based
119 |
120 | In addition to the RPM-based requirements, you will need:
121 |
122 | * wget
123 | * gzip
124 | * grep
125 |
126 |
127 | ### Pacman-based
128 |
129 | * mktemp
130 | * perl
131 | * grep
132 | * sed
133 | * cat
134 | * wget
135 | * zstd
136 | * xz-utils
137 | * tar
138 |
139 |
140 | ### APK-based
141 |
142 | * mktemp
143 | * perl
144 | * wget
145 | * tar
146 | * gzip
147 | * grep
148 |
149 |
150 | ### Launchpad-based
151 |
152 | In addition to the Debian-based requirements, you will need:
153 |
154 | * jq
155 |
156 |
157 | ### Install everything
158 |
159 | To install everything on Debian 10, run these commands:
160 |
161 | ```sh
162 | apt-get update
163 | apt-get install -y \
164 |   binutils file \
165 |   wget \
166 |   rpm2cpio cpio \
167 |   zstd jq
168 | ```
169 |
170 |
171 |
--------------------------------------------------------------------------------
/add:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | if [[ $# != 1 ]]; then
3 |   echo >&2 "Usage: $0 libc_filename"
4 |   exit 2
5 | fi
6 | libc="$(readlink -f "$1")"
7 | cd "$(dirname "$0")"
8 |
9 | . common/libc.sh
10 |
11 | requirements_general || die "General requirements are not met. Please refer to README.md for installation instructions"
12 | requirements_local || die "Requirements for indexing a local libc are not met. Please refer to README.md for installation instructions"
13 | add_local "$libc"
--------------------------------------------------------------------------------
/common/libc.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | mkdir -p db
4 |
5 | die() {
6 |   echo >&2 "$1"
7 |   exit 1
8 | }
9 |
10 | dump_symbols() {
11 |   readelf -Ws $1 | perl -n -e '/: (\w+)\s+\w+\s+(?:FUNC|OBJECT)\s+(?:\w+\s+){3}(\w+)\b(?:@@GLIBC)?/ && print "$2 $1\n"' | sort -u
12 | }
13 |
14 | extract_label() {
15 |   perl -n -e '/(\w+)/ && print $1'
16 | }
17 |
18 | dump_libc_start_main_ret() {
19 |   local call_main=`objdump -D $1 \
20 |     | grep -EA 100 '<__libc_start_main.*>:' \
21 |     | grep call \
22 |     | grep -EB 1 '<exit.*>' \
23 |     | head -n 1 \
24 |     | extract_label`
25 |   # Since glibc 2.34 it's __libc_start_main -> __libc_start_call_main -> main
26 |   # and __libc_start_call_main is right before __libc_start_main.
27 |   if [[ "$call_main" == "" ]]; then
28 |     local call_main=`objdump -D $1 \
29 |       | grep -EB 100 '<__libc_start_main.*>:' \
30 |       | grep call \
31 |       | grep -EB 1 '<exit.*>' \
32 |       | head -n 1 \
33 |       | extract_label`
34 |   fi
35 |   local offset=`objdump -D $1 | grep -EA 1 "(^| )$call_main:" | tail -n 1 | extract_label`
36 |   if [[ "$offset" != "" ]]; then
37 |     echo "__libc_start_main_ret $offset"
38 |   fi
39 | }
40 |
41 | dump_bin_sh() {
42 |   local offset=`strings -a -t x $1 | grep '/bin/sh' | head -n1 | extract_label`
43 |   if [[ "$offset" != "" ]]; then
44 |     echo "str_bin_sh $offset"
45 |   fi
46 | }
47 |
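# To reproduce the __libc_start_main_ret lookup by hand, run the same pipeline
# that dump_libc_start_main_ret uses (the db path below is illustrative):
#
#   objdump -D db/libc6_2.27-3ubuntu1.2_amd64.so \
#     | grep -EA 100 '<__libc_start_main.*>:' \
#     | grep call \
#     | grep -EB 1 '<exit.*>'
#
# The first line printed is the call to main; the address of the instruction
# right after it in the full disassembly is the recorded __libc_start_main_ret.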
(file "$libc" | grep -q 'ELF\|symbolic link to') ; then 76 | echo " -> libc ${libc} is not an ELF file" 77 | continue # Keep cnt and suffix as it 78 | fi 79 | process_libc "$libc" "$id$suffix" "$info" "$url" 80 | cnt=$((cnt+1)) 81 | suffix=_$cnt 82 | done 83 | } 84 | 85 | check_id() { 86 | local id=$1 87 | if [[ -e db/${id}.info ]]; then 88 | echo " -> Already have this version, 'rm ${PWD}/db/${id}.*' to force" 89 | return 1 90 | fi 91 | return 0 92 | } 93 | 94 | requirements_general() { 95 | which readelf 1>/dev/null 2>&1 || return 96 | which perl 1>/dev/null 2>&1 || return 97 | which objdump 1>/dev/null 2>&1 || return 98 | which strings 1>/dev/null 2>&1 || return 99 | which find 1>/dev/null 2>&1 || return 100 | which grep 1>/dev/null 2>&1 || return 101 | return 0 102 | } 103 | 104 | # ===== Debian-like ===== # 105 | 106 | get_debian() { 107 | local url="$1" 108 | local info="$2" 109 | local pkgname="$3" 110 | local tmp=`mktemp -d` 111 | echo "Getting $info" 112 | echo " -> Location: $url" 113 | local id=`echo $url | perl -n -e '/('"$pkgname"'[^\/]*)\./ && print $1'` 114 | echo " -> ID: $id" 115 | check_id $id || return 116 | echo " -> Downloading package" 117 | if ! wget "$url" 2>/dev/null -O $tmp/pkg.deb; then 118 | echo >&2 "Failed to download package from $url" 119 | return 120 | fi 121 | echo " -> Extracting package" 122 | pushd $tmp 1>/dev/null 123 | ar x pkg.deb || die "ar failed" 124 | if [ -f data.tar.zst ]; then 125 | zstd -d data.tar.zst || die "zstd failed" 126 | tar xf data.tar || die "tar failed" 127 | else 128 | tar xf data.tar.* || die "tar failed" 129 | fi 130 | popd 1>/dev/null 131 | index_libc "$tmp" "$id" "$info" "$url" 132 | rm -rf $tmp 133 | } 134 | 135 | get_all_debian() { 136 | local info=$1 137 | local url=$2 138 | local pkgname=$3 139 | for f in `wget $url/ -O - 2>/dev/null | grep -Eoh "$pkgname"'(-i386|-amd64|-x32)?_[^"]*(amd64|i386)\.deb' |grep -v ""`; do 140 | get_debian "$url/$f" "$info" "$pkgname" 141 | done 142 | return 0 143 | } 144 | 145 | requirements_debian() { 146 | which mktemp 1>/dev/null 2>&1 || return 147 | which perl 1>/dev/null 2>&1 || return 148 | which wget 1>/dev/null 2>&1 || return 149 | which ar 1>/dev/null 2>&1 || return 150 | which tar 1>/dev/null 2>&1 || return 151 | which grep 1>/dev/null 2>&1 || return 152 | which zstd 1>/dev/null 2>&1 || return 153 | return 0 154 | } 155 | 156 | # ===== RPM ===== # 157 | 158 | get_rpm() { 159 | local url="$1" 160 | local info="$2" 161 | local pkgname="$3" 162 | local tmp="$(mktemp -d)" 163 | echo "Getting $info" 164 | echo " -> Location: $url" 165 | local id=$(echo "$url" | perl -n -e '/('"$pkgname"'[^\/]*)\./ && print $1') 166 | echo " -> ID: $id" 167 | check_id "$id" || return 168 | echo " -> Downloading package" 169 | if ! 
104 | # ===== Debian-like ===== #
105 |
106 | get_debian() {
107 |   local url="$1"
108 |   local info="$2"
109 |   local pkgname="$3"
110 |   local tmp=`mktemp -d`
111 |   echo "Getting $info"
112 |   echo " -> Location: $url"
113 |   local id=`echo $url | perl -n -e '/('"$pkgname"'[^\/]*)\./ && print $1'`
114 |   echo " -> ID: $id"
115 |   check_id $id || return
116 |   echo " -> Downloading package"
117 |   if ! wget "$url" 2>/dev/null -O $tmp/pkg.deb; then
118 |     echo >&2 "Failed to download package from $url"
119 |     return
120 |   fi
121 |   echo " -> Extracting package"
122 |   pushd $tmp 1>/dev/null
123 |   ar x pkg.deb || die "ar failed"
124 |   if [ -f data.tar.zst ]; then
125 |     zstd -d data.tar.zst || die "zstd failed"
126 |     tar xf data.tar || die "tar failed"
127 |   else
128 |     tar xf data.tar.* || die "tar failed"
129 |   fi
130 |   popd 1>/dev/null
131 |   index_libc "$tmp" "$id" "$info" "$url"
132 |   rm -rf $tmp
133 | }
134 |
135 | get_all_debian() {
136 |   local info=$1
137 |   local url=$2
138 |   local pkgname=$3
139 |   for f in `wget $url/ -O - 2>/dev/null | grep -Eoh "$pkgname"'(-i386|-amd64|-x32)?_[^"]*(amd64|i386)\.deb' | grep -v '<'`; do
140 |     get_debian "$url/$f" "$info" "$pkgname"
141 |   done
142 |   return 0
143 | }
144 |
145 | requirements_debian() {
146 |   which mktemp 1>/dev/null 2>&1 || return
147 |   which perl 1>/dev/null 2>&1 || return
148 |   which wget 1>/dev/null 2>&1 || return
149 |   which ar 1>/dev/null 2>&1 || return
150 |   which tar 1>/dev/null 2>&1 || return
151 |   which grep 1>/dev/null 2>&1 || return
152 |   which zstd 1>/dev/null 2>&1 || return
153 |   return 0
154 | }
155 |
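# Manual equivalent of get_debian's extraction step (file name illustrative):
#
#   ar x libc6_2.31-0ubuntu9_amd64.deb        # emits data.tar.zst / data.tar.xz
#   zstd -d data.tar.zst && tar xf data.tar   # newer releases ship zstd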
156 | # ===== RPM ===== #
157 |
158 | get_rpm() {
159 |   local url="$1"
160 |   local info="$2"
161 |   local pkgname="$3"
162 |   local tmp="$(mktemp -d)"
163 |   echo "Getting $info"
164 |   echo " -> Location: $url"
165 |   local id=$(echo "$url" | perl -n -e '/('"$pkgname"'[^\/]*)\./ && print $1')
166 |   echo " -> ID: $id"
167 |   check_id "$id" || return
168 |   echo " -> Downloading package"
169 |   if ! wget --no-dns-cache --connect-timeout=30 "$url" 2>/dev/null -O "$tmp/pkg.rpm"; then
170 |     echo >&2 "Failed to download package from $url"
171 |     return
172 |   fi
173 |   echo " -> Extracting package"
174 |   pushd "$tmp" 1>/dev/null
175 |   (rpm2cpio pkg.rpm || die "rpm2cpio failed") | \
176 |     (cpio -id --quiet || die "cpio failed")
177 |   popd 1>/dev/null
178 |   index_libc "$tmp" "$id" "$info" "$url"
179 |   rm -rf "$tmp"
180 | }
181 |
182 | get_all_rpm() {
183 |   local info=$1
184 |   local pkg=$2
185 |   local pkgname=$3
186 |   local arch=$4
187 |   local website="http://rpmfind.net"
188 |   local searchurl="$website/linux/rpm2html/search.php?query=$pkg"
189 |   echo "Getting RPM package location: $info $pkg $pkgname $arch"
190 |   local url=""
191 |   for i in $(seq 1 3); do
192 |     urls=$(wget "$searchurl" -O - 2>/dev/null \
193 |       | grep -oh "/[^']*${pkgname}[^']*\.$arch\.rpm")
194 |     [[ -z "$urls" ]] || break
195 |     echo "Retrying..."
196 |     sleep 1
197 |   done
198 |
199 |   if [[ -z "$urls" ]]; then
200 |     echo >&2 "Failed to get RPM package URL for $info $pkg $pkgname $arch"
201 |     return
202 |   fi
203 |
204 |   for url in $urls
205 |   do
206 |     get_rpm "$website$url" "$info" "$pkgname"
207 |     sleep .1
208 |   done
209 | }
210 |
211 | requirements_rpm() {
212 |   which mktemp 1>/dev/null 2>&1 || return
213 |   which perl 1>/dev/null 2>&1 || return
214 |   which wget 1>/dev/null 2>&1 || return
215 |   which rpm2cpio 1>/dev/null 2>&1 || return
216 |   which cpio 1>/dev/null 2>&1 || return
217 |   which grep 1>/dev/null 2>&1 || return
218 |   return 0
219 | }
220 |
221 | # ===== CentOS ===== #
222 |
223 | get_from_filelistgz() {
224 |   local info=$1
225 |   local website=$2
226 |   local pkg=$3
227 |   local arch=$4
228 |   echo "Getting package $pkg locations"
229 |   local url=""
230 |   for i in $(seq 1 3); do
231 |     urls=$(wget "$website/filelist.gz" -O - 2>/dev/null \
232 |       | gzip -cd \
233 |       | grep -h "$pkg-[0-9]" \
234 |       | grep -h "$arch\.rpm")
235 |     [[ -z "$urls" ]] || break
236 |     echo "Retrying..."
237 |     sleep 1
238 |   done
239 |   [[ -n "$urls" ]] || die "Failed to get package version"
240 |   for url in $urls
241 |   do
242 |     get_rpm "$website/$url" "$info" "$pkg"
243 |     sleep .1
244 |   done
245 | }
246 |
247 | requirements_centos() {
248 |   which wget 1>/dev/null 2>&1 || return
249 |   which gzip 1>/dev/null 2>&1 || return
250 |   which grep 1>/dev/null 2>&1 || return
251 |   requirements_rpm || return
252 |   return 0
253 | }
254 |
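# Manual equivalent of get_rpm's extraction step (file name illustrative):
#
#   rpm2cpio glibc-2.28-101.el8.x86_64.rpm | cpio -id --quiet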
wget "$url" 2>/dev/null -O "$tmp/pkg.tar.gz"; then 373 | echo >&2 "Failed to download package from $url" 374 | return 375 | fi 376 | echo " -> Extracting package" 377 | pushd $tmp 1>/dev/null 378 | tar xzf pkg.tar.gz --warning=none 379 | popd 1>/dev/null 380 | index_libc "$tmp" "$id" "$info" "$url" 381 | rm -rf $tmp 382 | } 383 | 384 | get_all_apk() { 385 | local info=$1 386 | local repo=$2 387 | local version=$3 388 | local component=$4 389 | local arch=$5 390 | local pkgname=$6 391 | local directory="$repo/$version/$component/$arch/" 392 | echo "Getting package $info locations" 393 | local url="" 394 | for i in $(seq 1 3); do 395 | urls=$(wget "$directory" -O - 2>/dev/null \ 396 | | grep -oh '[^"]*'"$pkgname"'-[0-9][^"]*\.apk' \ 397 | | grep -v '.sig' \ 398 | | grep -v '>') 399 | [[ -z "$urls" ]] || break 400 | echo "Retrying..." 401 | sleep 1 402 | done 403 | [[ -n "$urls" ]] || die "Failed to get package version" 404 | for url in $urls 405 | do 406 | get_apk "$directory$url" "$info" "$pkgname" 407 | sleep .1 408 | done 409 | } 410 | 411 | requirements_apk() { 412 | which mktemp 1>/dev/null 2>&1 || return 413 | which perl 1>/dev/null 2>&1 || return 414 | which wget 1>/dev/null 2>&1 || return 415 | which tar 1>/dev/null 2>&1 || return 416 | which gzip 1>/dev/null 2>&1 || return 417 | which grep 1>/dev/null 2>&1 || return 418 | return 0 419 | } 420 | 421 | # ===== Launchpad ===== 422 | 423 | get_all_launchpad() { 424 | local info="$1" 425 | local distro="$2" 426 | local pkgname="$3" 427 | local arch="$4" 428 | 429 | local series="" 430 | for series in $(wget "https://api.launchpad.net/1.0/$distro/series" -O - 2>/dev/null | jq '.entries[] | select( .status != "Obsolete" ) | .name'); do 431 | series=$(echo $series | grep -Eo '[^"]+') 432 | echo "Launchpad: Series $series" 433 | local apiurl="https://api.launchpad.net/1.0/$distro/+archive/primary?ws.op=getPublishedBinaries&binary_name=$pkgname&exact_match=true&distro_arch_series=https://api.launchpad.net/1.0/$distro/$series/$arch" 434 | local url="" 435 | urls=$(wget "$apiurl" -O - 2>/dev/null | jq '[ .entries[] | .build_link + "/+files/" + .binary_package_name + "_" + .source_package_version + "_" + (.distro_arch_series_link | split("/") | .[-1]) + ".deb" | ltrimstr("https://api.launchpad.net/1.0/") | "https://launchpad.net/" + . ] | unique | .[]') 436 | for url in $urls; do 437 | url=$(echo $url | grep -Eo '[^"]+') 438 | # some old packages are deleted. ignore those. 439 | get_debian "$url" "$info-$series" "$pkgname" 440 | done 441 | done 442 | } 443 | 444 | requirements_launchpad() { 445 | which jq 1>/dev/null 2>&1 || return 446 | requirements_debian || return 447 | return 0 448 | } 449 | 450 | # ===== Local ===== # 451 | 452 | add_local() { 453 | local libc=$1 454 | [[ -f $libc ]] || return 455 | local info="local" 456 | local id="local-`sha1sum $libc`" 457 | echo "Adding local libc $libc (id $id)" 458 | check_id $id || return 459 | process_libc $libc $id $info 460 | } 461 | 462 | requirements_local() { 463 | which sha1sum 1>/dev/null 2>&1 || return 464 | return 0 465 | } 466 | -------------------------------------------------------------------------------- /download: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd "$(dirname "$0")" 3 | 4 | die() { 5 | echo >&2 $1 6 | exit 1 7 | } 8 | 9 | usage() { 10 | echo >&2 "Usage: $0 id" 11 | exit 2 12 | } 13 | 14 | download_single() { 15 | local id=$1 16 | echo "Getting $id" 17 | if [ -d "libs/$id" ]; then 18 | die " --> Downloaded before. 
--------------------------------------------------------------------------------
/download:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")"
3 |
4 | die() {
5 |   echo >&2 "$1"
6 |   exit 1
7 | }
8 |
9 | usage() {
10 |   echo >&2 "Usage: $0 id"
11 |   exit 2
12 | }
13 |
14 | download_single() {
15 |   local id=$1
16 |   echo "Getting $id"
17 |   if [ -d "libs/$id" ]; then
18 |     die " --> Downloaded before. Remove it to download again."
19 |   fi
20 |
21 |   if [ ! -f "db/$1.url" ]; then
22 |     die "Invalid ID, maybe the library was fetched in an older version or added manually?"
23 |   fi
24 |
25 |   local url="$(cat "db/$1.url")"
26 |   echo " -> Location: $url"
27 |   local tmp=`mktemp -d`
28 |   echo " -> Downloading package"
29 |   wget "$url" 2>/dev/null -O $tmp/pkg.deb || die "Failed to download package from $url"
30 |   echo " -> Extracting package"
31 |
32 |   pushd $tmp 1>/dev/null
33 |   ar x pkg.deb || die "ar failed"
34 |   tar xf data.tar.* || die "tar failed"
35 |   popd 1>/dev/null
36 |
37 |   mkdir libs/$id
38 |   cp $tmp/lib/*/* libs/$id 2>/dev/null || cp $tmp/lib32/* libs/$id 2>/dev/null \
39 |     || die "Failed to save. Check $tmp manually"
40 |   echo " -> Package saved to libs/$id"
41 |
42 |   rm -rf $tmp
43 | }
44 |
45 | if [[ $# != 1 ]]; then
46 |   usage
47 | fi
48 | download_single "$1"
--------------------------------------------------------------------------------
/dump:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")"
3 | . common/libc.sh
4 | if [[ $# -lt 1 ]]; then
5 |   echo >&2 "Usage: $0 id [name1 [name2 ...]]"
6 |   exit 2
7 | fi
8 | id=$1
9 | shift 1
10 | if [[ $# == 0 ]]; then
11 |   names="__libc_start_main_ret system dup2 read write str_bin_sh"
12 | else
13 |   names="$@"
14 | fi
15 | ls -1 "db/${id}."* >/dev/null 2>&1 || die "Invalid ID '$id'"
16 | for name in $names; do
17 |   offset=`cat db/${id}.symbols | grep "^$name " | cut -d' ' -f2`
18 |   [ -z "$offset" ] && die "Invalid symbol '$name'"
19 |   echo "offset_${name} = 0x${offset}"
20 | done
--------------------------------------------------------------------------------
/find:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")"
3 | function usage() {
4 |   echo >&2 "Usage: $0 name address [name address ...]"
5 |   exit 2
6 | }
7 |
8 | function find_single() {
9 |   name=$1
10 |   address=$2
11 |   addr_last12=`echo -n "$address" | tail -c 3`
12 |   grep -i -e "^$name .*$addr_last12$" db/*.symbols \
13 |     | perl -n -e '/db\/(.*)\.symbols/ && print "$1\n"' \
14 |     | sort
15 | }
16 |
17 | function find() {
18 |   [[ $# -lt 2 ]] && usage
19 |   name=$1; shift
20 |   address=$1; shift
21 |   if [[ $# == 0 ]]; then
22 |     find_single $name $address
23 |   else
24 |     comm -12 \
25 |       <(find_single $name $address) \
26 |       <(find "$@")
27 |   fi
28 | }
29 |
30 | ret=1
31 | for id in `find "$@"`; do
32 |   echo "`cat db/${id}.info` ($id)"
33 |   ret=0
34 | done
35 | exit $ret
--------------------------------------------------------------------------------
/get:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")"
3 | . 
common/libc.sh 4 | 5 | cntr_category=1 6 | declare -a categories 7 | declare -A requirements 8 | 9 | categories[cntr_category]="ubuntu" 10 | requirements["ubuntu"]="requirements_debian" 11 | cntr_category=$((cntr_category + 1)) 12 | ubuntu() { 13 | get_all_debian ubuntu-eglibc http://archive.ubuntu.com/ubuntu/pool/main/e/eglibc/ libc6 14 | get_all_debian ubuntu-glibc http://archive.ubuntu.com/ubuntu/pool/main/g/glibc/ libc6 15 | get_all_debian ubuntu-musl http://archive.ubuntu.com/ubuntu/pool/universe/m/musl/ musl 16 | get_all_debian ubuntu-dietlibc http://archive.ubuntu.com/ubuntu/pool/universe/d/dietlibc/ dietlibc 17 | get_all_debian ubuntu-security-eglibc http://security.ubuntu.com/ubuntu/pool/main/e/eglibc/ libc6 18 | get_all_debian ubuntu-security-glibc http://security.ubuntu.com/ubuntu/pool/main/g/glibc/ libc6 19 | get_all_debian ubuntu-security-musl http://security.ubuntu.com/ubuntu/pool/universe/m/musl/ musl 20 | get_all_debian ubuntu-security-dietlibc http://security.ubuntu.com/ubuntu/pool/universe/d/dietlibc/ dietlibc 21 | get_all_debian ubuntu-old-eglibc http://old-releases.ubuntu.com/ubuntu/pool/main/e/eglibc/ libc6 22 | get_all_debian ubuntu-old-glibc http://old-releases.ubuntu.com/ubuntu/pool/main/g/glibc/ libc6 23 | get_all_debian ubuntu-old-musl http://old-releases.ubuntu.com/ubuntu/pool/universe/m/musl/ musl 24 | get_all_debian ubuntu-old-dietlibc http://old-releases.ubuntu.com/ubuntu/pool/universe/d/dietlibc/ dietlibc 25 | } 26 | 27 | categories[cntr_category]="debian" 28 | requirements["debian"]="requirements_debian" 29 | cntr_category=$((cntr_category + 1)) 30 | debian() { 31 | get_all_debian debian-glibc https://deb.debian.org/debian/pool/main/g/glibc/ libc6 32 | get_all_debian debian-musl https://deb.debian.org/debian/pool/main/m/musl/ musl 33 | get_all_debian debian-dietlibc https://deb.debian.org/debian/pool/main/d/dietlibc/ dietlibc 34 | } 35 | 36 | categories[cntr_category]="rpm" 37 | requirements["rpm"]="requirements_rpm" 38 | cntr_category=$((cntr_category + 1)) 39 | rpm() { 40 | get_all_rpm rpm glibc libc x86_64 41 | get_all_rpm rpm glibc libc i586 42 | get_all_rpm rpm glibc libc i686 43 | get_all_rpm rpm musl musl x86_64 44 | get_all_rpm rpm musl musl i586 45 | get_all_rpm rpm musl musl i686 46 | } 47 | 48 | categories[cntr_category]="centos" 49 | requirements["centos"]="requirements_centos" 50 | cntr_category=$((cntr_category + 1)) 51 | centos() { 52 | get_from_filelistgz centos-glibc https://vault.centos.org/ glibc i686 53 | get_from_filelistgz centos-glibc https://vault.centos.org/ glibc x86_64 54 | } 55 | 56 | categories[cntr_category]="centos_stream" 57 | requirements["centos_stream"]="requirements_centos_stream" 58 | cntr_category=$((cntr_category + 1)) 59 | centos_stream() { 60 | get_from_fullfiletimelist centos-glibc https://mirror.stream.centos.org/ glibc i686 61 | get_from_fullfiletimelist centos-glibc https://mirror.stream.centos.org/ glibc x86_64 62 | } 63 | 64 | categories[cntr_category]="arch" 65 | requirements["arch"]="requirements_pkg" 66 | cntr_category=$((cntr_category + 1)) 67 | arch() { 68 | get_all_pkg arch-glibc https://archive.archlinux.org/packages/g/glibc/ libc 69 | get_all_pkg arch-lib32-glibc https://archive.archlinux.org/packages/l/lib32-glibc/ libc 70 | get_all_pkg arch-musl https://archive.archlinux.org/packages/m/musl/ musl 71 | } 72 | 73 | categories[cntr_category]="alpine" 74 | requirements["alpine"]="requirements_apk" 75 | cntr_category=$((cntr_category + 1)) 76 | alpine() { 77 | alpine_versions=( 78 | latest-stable 79 | edge 
80 | v3.0
81 | v3.1
82 | v3.2
83 | v3.3
84 | v3.4
85 | v3.5
86 | v3.6
87 | v3.7
88 | v3.8
89 | v3.9
90 | v3.10
91 | v3.11
92 | v3.12
93 | v3.13
94 | v3.14
95 | v3.15
96 | v3.16
97 | v3.17
98 | v3.18
99 | v3.19
100 | )
101 |
102 | for version in "${alpine_versions[@]}"; do
103 |   get_all_apk alpine-musl http://dl-cdn.alpinelinux.org/alpine/ "$version" main x86_64 musl
104 |   get_all_apk alpine-musl http://dl-cdn.alpinelinux.org/alpine/ "$version" main x86 musl
105 | done
106 | }
107 |
108 | categories[cntr_category]="kali"
109 | requirements["kali"]="requirements_debian"
110 | cntr_category=$((cntr_category + 1))
111 | kali() {
112 |   get_all_debian kali-glibc https://http.kali.org/pool/main/g/glibc/ libc6
113 |   get_all_debian kali-musl https://http.kali.org/pool/main/m/musl/ musl
114 | }
115 | categories[cntr_category]="parrotsec"
116 | requirements["parrotsec"]="requirements_debian"
117 | cntr_category=$((cntr_category + 1))
118 | parrotsec() {
119 |   get_all_debian parrotsec-glibc https://download.parrot.sh/parrot/pool/main/g/glibc/ libc6
120 |   get_all_debian parrotsec-musl https://download.parrot.sh/parrot/pool/main/m/musl/ musl
121 | }
122 | categories[cntr_category]="launchpad"
123 | requirements["launchpad"]="requirements_launchpad"
124 | cntr_category=$((cntr_category + 1))
125 | launchpad() {
126 |   get_all_launchpad launchpad-ubuntu-glibc ubuntu libc6 amd64
127 |   get_all_launchpad launchpad-ubuntu-glibc ubuntu libc6 i386
128 | }
129 |
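# Adding another Debian-derived source is one more block of the same shape;
# a sketch (the category name and mirror URL are illustrative, not a
# supported source):
#
#   categories[cntr_category]="devuan"
#   requirements["devuan"]="requirements_debian"
#   cntr_category=$((cntr_category + 1))
#   devuan() {
#     get_all_debian devuan-glibc https://deb.devuan.org/merged/pool/DEVUAN/main/g/glibc/ libc6
#   }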
130 | help() {
131 |   exec 1>&2
132 |   echo "Please specify libc categories to download:"
133 |   for category in "${categories[@]}" ; do
134 |     echo -e "\t* $category"
135 |   done
136 |   echo ""
137 |   echo "You may also specify 'all' to download all categories available."
138 |   echo ""
139 |   echo "Example:"
140 |   echo ""
141 |   echo "$ ./get ubuntu rpm arch"
142 |   echo "$ ./get all"
143 |   exit 1
144 | }
145 |
146 | if [[ "$#" -eq 0 ]] ; then
147 |   help
148 | fi
149 | if [[ " $@ " == *" all "* ]] ; then
150 |   set -- "${categories[@]}"
151 | fi
152 |
153 | # Verify arguments, requirements, and display a recap
154 | requirements_general || die "General requirements are not met. Please refer to README.md for installation instructions"
155 | echo "Will download or update for:"
156 | for category in "$@" ; do
157 |   if [[ ! " ${categories[@]} " == *" ${category} "* ]] ; then
158 |     die "Invalid category '$category'"
159 |   fi
160 |   ${requirements[$category]} || die "Requirements for downloading or updating '$category' are not met. Please refer to README.md for installation instructions"
161 |   echo -e "\t* $category ; Requirements are met"
162 | done
163 |
164 |
165 | # Let's start :)
166 | for category in "$@" ; do
167 |   echo "Downloading/updating $category"
168 |   $category
169 | done
--------------------------------------------------------------------------------
/identify:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -o pipefail
3 | help() {
4 |   echo >&2 "Usage: $0 path/to/libc.so"
5 |   echo >&2 "   OR $0 bid=<hash>"
6 |   echo >&2 "   OR $0 md5=<hash>"
7 |   echo >&2 "   OR $0 sha1=<hash>"
8 |   echo >&2 "   OR $0 sha256=<hash>"
9 |   exit 2
10 | }
11 |
12 | if [[ $# != 1 ]]; then
13 |   help
14 | fi
15 |
16 | arg="$1"
17 |
18 | if [ -f "$arg" ]; then
19 |   libc="$(readlink -f "$arg")"
20 | fi
21 |
22 | cd "$(dirname "$0")"
23 |
24 | if [[ -f "$libc" ]]; then
25 |   arg="sha1=$(sha1sum "$libc" | awk '{print $1}')"
26 | fi
27 |
28 | case "$arg" in
29 |   bid=*)
30 |     hash="${arg#"bid="}"
31 |     tool="file"
32 |     regex="=$hash,"
33 |     ;;
34 |   md5=*)
35 |     hash="${arg#"md5="}"
36 |     tool="md5sum"
37 |     regex="$hash "
38 |     ;;
39 |   sha1=*)
40 |     hash="${arg#"sha1="}"
41 |     tool="sha1sum"
42 |     regex="$hash "
43 |     ;;
44 |   sha256=*)
45 |     hash="${arg#"sha256="}"
46 |     tool="sha256sum"
47 |     regex="$hash "
48 |     ;;
49 |   *)
50 |     help
51 | esac
52 |
53 | ls -1 db/*.so | xargs $tool | grep -- "$regex" | perl -n -e '/([^\/: ]+)\.so/&&print "$1\n"'
--------------------------------------------------------------------------------
/libs/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !/.gitignore
--------------------------------------------------------------------------------
/searchengine/.dockerignore:
--------------------------------------------------------------------------------
1 | **/__pycache__/
2 | **/node_modules/
--------------------------------------------------------------------------------
/searchengine/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
--------------------------------------------------------------------------------
/searchengine/DEVELOPMENT.md:
--------------------------------------------------------------------------------
1 | # libc-database web service development
2 |
3 | ## Local deployment
4 |
5 | 1. Populate libc db with `./get` or `./add`.
6 |
7 | 1. Start the server with docker (must be run from the `searchengine/` directory):
8 |
9 |    ```sh
10 |    docker-compose up --build
11 |    ```
12 |
13 | 1. Install pip dependencies from `requirements.txt`.
14 |
15 |    Example with venv:
16 |
17 |    ```sh
18 |    python -m venv venv
19 |    . venv/bin/activate
20 |    pip install -U pip
21 |    pip install -U -r requirements.txt
22 |    ```
23 |
24 | 1. 
Index libc db in elasticsearch (must be run from the `searchengine/` directory): 25 | 26 | ```sh 27 | python -m index ../db 28 | ``` 29 | 30 | ### Cleaning up 31 | 32 | To remove all development files run: 33 | 34 | ```sh 35 | docker-compose down --volumes --rmi all 36 | ``` 37 | 38 | 39 | ## UWSGI logging 40 | 41 | To get app logs from UWSGI: 42 | 43 | ```sh 44 | docker logs searchengine_uwsgi_1 45 | ``` 46 | -------------------------------------------------------------------------------- /searchengine/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-slim-bullseye AS uwsgi 2 | 3 | ENV PYTHONUNBUFFERED 1 4 | WORKDIR /app 5 | 6 | RUN apt-get -y update && apt-get install -y gcc 7 | 8 | COPY requirements.txt /app/requirements.txt 9 | RUN pip install -r requirements.txt 10 | 11 | COPY . /app 12 | 13 | EXPOSE 8000 14 | 15 | CMD uwsgi --ini uwsgi.ini 16 | -------------------------------------------------------------------------------- /searchengine/README.md: -------------------------------------------------------------------------------- 1 | # libc-database web service 2 | 3 | Full OpenAPI spec: https://github.com/niklasb/libc-database/blob/master/searchengine/api.yml 4 | 5 | Hosted at https://libc.rip/api/ 6 | 7 | 8 | You can search by symbol: 9 | 10 | ``` 11 | $ curl -X POST -H 'Content-Type: application/json' --data \ 12 | '{"symbols": {"strncpy": "db0", "strcat": "0x000000000d800"}}' \ 13 | 'https://libc.rip/api/find' 14 | [ 15 | { 16 | "buildid": "d3cf764b2f97ac3efe366ddd07ad902fb6928fd7", 17 | "download_url": "https://libc.rip/download/libc6_2.27-3ubuntu1.2_amd64.so", 18 | "id": "libc6_2.27-3ubuntu1.2_amd64", 19 | "md5": "35ef4ffc9c6ad7ffd1fd8c16f14dc766", 20 | "sha1": "a22321cd65f28f70cf321614fdfd22f36ecd0afe", 21 | "sha256": "f0ad9639b2530741046e06c96270b25da2339b6c15a7ae46de8fb021b3c4f529", 22 | "symbols": { 23 | ... 24 | } 25 | } 26 | ] 27 | ``` 28 | 29 | Or by hash (buildid, sha1, md5, sha256): 30 | 31 | ``` 32 | $ curl -X POST -H 'Content-Type: application/json' --data \ 33 | '{"sha1": "15ecf5c58e8749650d5fe5d641f77f3a0fffab16"}' \ 34 | 'https://libc.rip/api/find' 35 | [ 36 | { 37 | "buildid": "5ae879fe5a9ff3e6622cf0dbb19fc3a80b78ec9f", 38 | "download_url": "https://libc.rip/download/libc6_2.10.1-0ubuntu19_i386.so", 39 | "id": "libc6_2.10.1-0ubuntu19_i386", 40 | "md5": "8cf4746dab814f23cbc93aee208b19e3", 41 | "sha1": "15ecf5c58e8749650d5fe5d641f77f3a0fffab16", 42 | "sha256": "be5d51dbb3c96196b4b94ff04aa9cdd54fe80e3d3dd95cca1cf4d615c251ef5d", 43 | "symbols": { 44 | ... 45 | } 46 | } 47 | ] 48 | ``` 49 | 50 | Or a combination (combined via AND): 51 | 52 | ``` 53 | $ curl -X POST -H 'Content-Type: application/json' --data \ 54 | '{"sha1": "15ecf5c58e8749650d5fe5d641f77f3a0fffab16", "buildid": "5ae879fe5a9ff3e6622cf0dbb19fc3a80b78ec9f", "symbols": {"dup2": "690", "puts": "2b0"}}' \ 55 | 'https://libc.rip/api/find' 56 | [ 57 | { 58 | "buildid": "5ae879fe5a9ff3e6622cf0dbb19fc3a80b78ec9f", 59 | "download_url": "https://libc.rip/download/libc6_2.10.1-0ubuntu19_i386.so", 60 | "id": "libc6_2.10.1-0ubuntu19_i386", 61 | "md5": "8cf4746dab814f23cbc93aee208b19e3", 62 | "sha1": "15ecf5c58e8749650d5fe5d641f77f3a0fffab16", 63 | "sha256": "be5d51dbb3c96196b4b94ff04aa9cdd54fe80e3d3dd95cca1cf4d615c251ef5d", 64 | "symbols": { 65 | ... 
66 | } 67 | } 68 | ] 69 | ``` 70 | 71 | 72 | To dump a specific set of symbols, given the id of the library: 73 | 74 | ``` 75 | $ curl -X POST -H 'Content-Type: application/json' \ 76 | --data '{"symbols": ["strcat"]}' \ 77 | 'https://libc.rip/api/libc/libc6_2.27-3ubuntu1.2_amd64' 78 | { 79 | "buildid": "d3cf764b2f97ac3efe366ddd07ad902fb6928fd7", 80 | "download_url": "https://libc.rip/download/libc6_2.27-3ubuntu1.2_amd64.so", 81 | "id": "libc6_2.27-3ubuntu1.2_amd64", 82 | "md5": "35ef4ffc9c6ad7ffd1fd8c16f14dc766", 83 | "sha1": "a22321cd65f28f70cf321614fdfd22f36ecd0afe", 84 | "sha256": "f0ad9639b2530741046e06c96270b25da2339b6c15a7ae46de8fb021b3c4f529", 85 | "symbols": { 86 | "__libc_start_main_ret": "0x21b97", 87 | "dup2": "0x110ab0", 88 | "printf": "0x64f00", 89 | "puts": "0x80a30", 90 | "read": "0x110180", 91 | "str_bin_sh": "0x1b40fa", 92 | "strcat": "0x9d800", 93 | "system": "0x4f4e0", 94 | "write": "0x110250" 95 | } 96 | } 97 | ``` 98 | -------------------------------------------------------------------------------- /searchengine/api.yml: -------------------------------------------------------------------------------- 1 | openapi: 3.0.3 2 | info: 3 | version: 1.0.0 4 | title: libc-database search engine 5 | description: '' 6 | paths: 7 | /find: 8 | post: 9 | operationId: app.find 10 | tags: 11 | - libcsearch 12 | description: |- 13 | Look up libc by various attributes 14 | requestBody: 15 | content: 16 | application/json: 17 | schema: 18 | $ref: '#/components/schemas/Libc' 19 | required: true 20 | responses: 21 | '200': 22 | content: 23 | application/json: 24 | schema: 25 | type: array 26 | items: 27 | $ref: '#/components/schemas/Libc' 28 | description: '' 29 | 30 | /libc/{id}: 31 | post: 32 | operationId: app.dump 33 | tags: 34 | - libcsearch 35 | parameters: 36 | - in: path 37 | name: id 38 | schema: 39 | type: string 40 | required: true 41 | description: |- 42 | Dump libc symbols 43 | requestBody: 44 | content: 45 | application/json: 46 | schema: 47 | $ref: '#/components/schemas/DumpRequest' 48 | required: true 49 | responses: 50 | '200': 51 | content: 52 | application/json: 53 | schema: 54 | $ref: '#/components/schemas/Libc' 55 | description: '' 56 | 57 | components: 58 | schemas: 59 | Libc: 60 | type: object 61 | properties: 62 | md5: 63 | type: string 64 | sha1: 65 | type: string 66 | sha256: 67 | type: string 68 | buildid: 69 | type: string 70 | id: 71 | type: string 72 | symbols: 73 | type: object 74 | additionalProperties: 75 | type: string 76 | pattern: '^(0x)?[a-fA-F0-9]+$' 77 | download_url: 78 | type: string 79 | format: url 80 | readOnly: true 81 | symbols_url: 82 | type: string 83 | format: url 84 | readOnly: true 85 | libs_url: 86 | type: string 87 | format: url 88 | readOnly: true 89 | 90 | DumpRequest: 91 | type: object 92 | properties: 93 | symbols: 94 | type: array 95 | items: 96 | type: string 97 | 98 | 99 | servers: 100 | - url: https://libc.rip/api 101 | -------------------------------------------------------------------------------- /searchengine/app.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from functools import lru_cache 3 | 4 | import connexion 5 | from elasticsearch import Elasticsearch 6 | from flask_cors import CORS 7 | 8 | import config 9 | 10 | 11 | es = Elasticsearch(hosts=[config.ES_HOST]) 12 | log = logging.getLogger('wsgi') 13 | 14 | log.info(f'Using elasticsearch server {config.ES_HOST}, index {config.ES_INDEX_NAME}') 15 | 16 | 17 | @lru_cache(maxsize=2000) 18 | def get_symbols(id): 
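    # db/<id>.symbols holds one "name hex_offset" pair per line
    # (e.g. "system 4f4e0"); parse it into a {name: int} mapping.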
19 |     syms = {}
20 |     with open(f'{config.DB_DIR}/{id}.symbols') as f:
21 |         for line in f:
22 |             if not line.strip():
23 |                 continue
24 |             name, addr = line.split()
25 |             addr = int(addr, 16)
26 |             syms[name] = addr
27 |     return syms
28 |
29 |
30 | @lru_cache(maxsize=2000)
31 | def get_libs_url(id):
32 |     with open(f'{config.DB_DIR}/{id}.url') as f:
33 |         return f.read().strip()
34 |
35 |
36 | def find(body, extra_symbols=[]):
37 |     filters = []
38 |
39 |     for h in ('id', 'md5', 'sha1', 'sha256', 'buildid'):
40 |         if h in body:
41 |             filters.append({'match': {h: body[h]}})
42 |
43 |     symbol_filters = body.get('symbols')
44 |     if symbol_filters:
45 |         # Each symbol is indexed as a term "<name>@<low 12 bits of addr>".
46 |         for sym, addr in symbol_filters.items():
47 |             addr = int(addr, 16)
48 |             term = f'{sym}@{addr & 0xfff:03x}'
49 |             filters.append({'term': {'symbols': term}})
50 |
51 |
52 |     if not filters:
53 |         return connexion.problem(
54 |             status=400,
55 |             title='Bad request',
56 |             detail='must provide at least one filter',
57 |         )
58 |
59 |     query = {"bool": {"filter": filters}}
60 |     res = es.search(index=config.ES_INDEX_NAME, query=query)
61 |
62 |     libcs = []
63 |     for hit in res['hits']['hits']:
64 |         doc = hit['_source']
65 |         id = doc['id']
66 |         syms = get_symbols(id)
67 |
68 |         result_symbols = {}
69 |
70 |         names = list(config.DEFAULT_SYMBOLS) + extra_symbols
71 |         if symbol_filters:
72 |             names += symbol_filters.keys()
73 |         for name in names:
74 |             if name in syms:
75 |                 result_symbols[name] = f'{syms[name]:#x}'
76 |
77 |         libcs.append({
78 |             'id': id,
79 |             'buildid': doc.get('buildid'),
80 |             'sha1': doc.get('sha1'),
81 |             'md5': doc.get('md5'),
82 |             'sha256': doc.get('sha256'),
83 |             'symbols': result_symbols,
84 |             'download_url': config.DOWNLOAD_URL.format(id),
85 |             'symbols_url': config.ALL_SYMBOLS_URL.format(id),
86 |             'libs_url': get_libs_url(id),
87 |         })
88 |     return libcs
89 |
90 |
91 | def dump(id, body):
92 |     res = find({'id': id}, extra_symbols=body['symbols'])
93 |     if not res:
94 |         return connexion.problem(
95 |             status=404,
96 |             title='Not found',
97 |             detail=f'Unknown ID: {id}'
98 |         )
99 |
100 |     return res[0]
101 |
102 |
103 | app = connexion.App(__name__, specification_dir='.')
104 | app.add_api('api.yml')
105 | CORS(app.app)
106 |
107 | if __name__ == '__main__':
108 |     app.run(port=8080, host='127.0.0.1', debug=True)
--------------------------------------------------------------------------------
/searchengine/config.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | ES_INDEX_NAME = 'libcsearch'
4 |
5 | # ES_HOST = 'http://localhost:9200'
6 | ES_HOST = 'http://es01:9200'
7 |
8 | # DB_DIR = os.path.dirname(os.path.abspath(__file__)) + '/../db'
9 | DB_DIR = '/db'
10 |
11 | DEFAULT_SYMBOLS = [
12 |     '__libc_start_main_ret',
13 |     'system',
14 |     'dup2',
15 |     'str_bin_sh',
16 |     'read',
17 |     'write',
18 |     'puts',
19 |     'printf',
20 | ]
21 | DOWNLOAD_URL = 'https://libc.rip/download/{}.so'
22 | ALL_SYMBOLS_URL = 'https://libc.rip/download/{}.symbols'
--------------------------------------------------------------------------------
/searchengine/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.4'
2 | services:
3 |
4 |   uwsgi:
5 |     build:
6 |       context: .
7 | target: uwsgi 8 | volumes: 9 | - ../db:/db 10 | ports: 11 | - 127.0.0.1:8000:8000 12 | networks: 13 | - libcsearch 14 | restart: always 15 | logging: 16 | driver: syslog 17 | options: 18 | tag: uwsgi 19 | 20 | es01: 21 | image: docker.elastic.co/elasticsearch/elasticsearch:7.17.10 22 | container_name: es01 23 | environment: 24 | - node.name=es01 25 | - discovery.type=single-node 26 | - bootstrap.memory_lock=true 27 | - "ES_JAVA_OPTS=-Xms512m -Xmx512m" 28 | ulimits: 29 | memlock: 30 | soft: -1 31 | hard: -1 32 | volumes: 33 | - data01:/usr/share/elasticsearch/data 34 | ports: 35 | - 127.0.0.1:9200:9200 36 | networks: 37 | - libcsearch 38 | restart: always 39 | logging: 40 | driver: syslog 41 | options: 42 | tag: elasticsearch 43 | 44 | volumes: 45 | data01: 46 | driver: local 47 | 48 | networks: 49 | libcsearch: 50 | -------------------------------------------------------------------------------- /searchengine/frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /searchengine/frontend/README.md: -------------------------------------------------------------------------------- 1 | This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). 2 | 3 | ## Available Scripts 4 | 5 | In the project directory, you can run: 6 | 7 | ### `npm start` 8 | 9 | Runs the app in the development mode.
10 | Open [http://localhost:3000](http://localhost:3000) to view it in the browser. 11 | 12 | The page will reload if you make edits.
13 | You will also see any lint errors in the console. 14 | 15 | ### `npm test` 16 | 17 | Launches the test runner in the interactive watch mode.
18 | See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. 19 | 20 | ### `npm run build` 21 | 22 | Builds the app for production to the `build` folder.
23 | It correctly bundles React in production mode and optimizes the build for the best performance. 24 | 25 | The build is minified and the filenames include the hashes.
26 | Your app is ready to be deployed! 27 | 28 | See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. 29 | 30 | ### `npm run eject` 31 | 32 | **Note: this is a one-way operation. Once you `eject`, you can’t go back!** 33 | 34 | If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project. 35 | 36 | Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. 37 | 38 | You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. 39 | 40 | ## Learn More 41 | 42 | You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). 43 | 44 | To learn React, check out the [React documentation](https://reactjs.org/). 45 | 46 | ### Code Splitting 47 | 48 | This section has moved here: https://facebook.github.io/create-react-app/docs/code-splitting 49 | 50 | ### Analyzing the Bundle Size 51 | 52 | This section has moved here: https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size 53 | 54 | ### Making a Progressive Web App 55 | 56 | This section has moved here: https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app 57 | 58 | ### Advanced Configuration 59 | 60 | This section has moved here: https://facebook.github.io/create-react-app/docs/advanced-configuration 61 | 62 | ### Deployment 63 | 64 | This section has moved here: https://facebook.github.io/create-react-app/docs/deployment 65 | 66 | ### `npm run build` fails to minify 67 | 68 | This section has moved here: https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify 69 | -------------------------------------------------------------------------------- /searchengine/frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@material-ui/core": "^4.11.0", 7 | "@testing-library/jest-dom": "^4.2.4", 8 | "@testing-library/react": "^9.5.0", 9 | "@testing-library/user-event": "^7.2.1", 10 | "fontsource-roboto": "^3.0.3", 11 | "react": "^16.13.1", 12 | "react-dom": "^16.13.1", 13 | "react-scripts": "3.4.3" 14 | }, 15 | "scripts": { 16 | "start": "react-scripts start", 17 | "build": "react-scripts build", 18 | "test": "react-scripts test", 19 | "eject": "react-scripts eject" 20 | }, 21 | "eslintConfig": { 22 | "extends": "react-app" 23 | }, 24 | "browserslist": { 25 | "production": [ 26 | ">0.2%", 27 | "not dead", 28 | "not op_mini all" 29 | ], 30 | "development": [ 31 | "last 1 chrome version", 32 | "last 1 firefox version", 33 | "last 1 safari version" 34 | ] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /searchengine/frontend/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 17 | 18 | 27 | 
libc-database 28 | 29 | 30 | 31 |
32 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /searchengine/frontend/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /searchengine/frontend/public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /searchengine/frontend/src/App.css: -------------------------------------------------------------------------------- 1 | .App { 2 | /*text-align: center;*/ 3 | padding: 1rem; 4 | font-size: 90%; 5 | } 6 | -------------------------------------------------------------------------------- /searchengine/frontend/src/App.js: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect, useCallback, useRef } from 'react'; 2 | import './App.css'; 3 | import 'fontsource-roboto'; 4 | import Button from '@material-ui/core/Button'; 5 | import Grid from '@material-ui/core/Grid'; 6 | import TextField from '@material-ui/core/TextField'; 7 | import Link from '@material-ui/core/Link'; 8 | import CircularProgress from '@material-ui/core/CircularProgress'; 9 | import Table from '@material-ui/core/Table'; 10 | import TableBody from '@material-ui/core/TableBody'; 11 | import TableCell from '@material-ui/core/TableCell'; 12 | import TableRow from '@material-ui/core/TableRow'; 13 | 14 | import { makeStyles } from '@material-ui/core/styles'; 15 | 16 | 17 | const API_BASE = 'https://libc.rip/api'; 18 | 19 | const api = async (path, data) => { 20 | let resp = await fetch(`${API_BASE}${path}`, { 21 | method: 'POST', 22 | mode: 'cors', 23 | cache: 'no-cache', 24 | headers: { 25 | 'Content-Type': 'application/json' 26 | }, 27 | body: JSON.stringify(data), 28 | }); 29 | return await resp.json(); 30 | }; 31 | 32 | const useStyles = makeStyles((theme) => ({ 33 | root: { 34 | '& .MuiTextField-root': { 35 | margin: theme.spacing(1), 36 | }, 37 | '& .MuiButton-root': { 38 | margin: theme.spacing(1), 39 | }, 40 | '& .remove': { 41 | marginTop: '1.2rem', 42 | height: '2rem', 43 | }, 44 | '& .findbutton': { 45 | marginTop: '1.2rem', 46 | }, 47 | }, 48 | table: { 49 | marginTop: '1rem', 50 | marginBottom: '1rem', 51 | } 52 | })); 53 | 54 | 55 | function SearchRow({ onChange = () => {}, onRemove = () => {} }) { 56 | const [symbol, setSymbol] = useState(""); 57 | const [address, setAddress] = useState(""); 58 | const [addressValid, setAddressValid] = useState(true); 59 | 60 | const onSymbolChange = useCallback((evt) => { 61 | setSymbol(evt.target.value); 62 | }, []); 63 | 64 | const onAddressChange = useCallback((evt) => { 65 | setAddress(evt.target.value); 66 | }, []); 67 | 68 | useEffect(() => { 69 | const valid = !!address.match(/^(0x)?[0-9a-fA-F]*$/); 70 | 
setAddressValid(valid); 71 | onChange({valid, symbol, address}); 72 | }, [address, symbol, onChange]); 73 | 74 | return ( 75 |
76 | 77 | 78 | 81 |
82 | ); 83 | } 84 | 85 | function SearchForm({ onSearch = () => {} }) { 86 | const classes = useStyles(); 87 | const [nextId, setNextId] = useState(0); 88 | const [rows, setRows] = useState([]); 89 | const [states, setStates] = useState({}); 90 | const onRemoveRef = useRef(); 91 | const onChangeRef = useRef(); 92 | 93 | const makeRow = (id) => { 94 | return ( 95 | onRemoveRef.current(id)} 97 | onChange={(obj) => onChangeRef.current(id, obj)} />); 98 | }; 99 | 100 | const isEmpty = useCallback((i) => { 101 | let state = states[rows[i].key]; 102 | return !state || (!state.symbol && !state.address); 103 | }, [rows, states]); 104 | 105 | // Add new empty rows automatically 106 | useEffect(() => { 107 | let need = true; 108 | for (let i = 0; i < rows.length; ++i) { 109 | if (isEmpty(i)) { 110 | need = false; 111 | break; 112 | } 113 | } 114 | 115 | if (need) { 116 | setRows(rows => rows.concat([makeRow('' + nextId)])); 117 | setNextId(id => id + 1); 118 | } 119 | }, [rows, states, nextId, isEmpty]); 120 | 121 | // Remove superfluous rows at the end 122 | useEffect(() => { 123 | let i = rows.length - 1; 124 | while (i >= 1 && isEmpty(i) && isEmpty(i-1)) { 125 | --i; 126 | } 127 | if (i < rows.length - 1) { 128 | setRows(rows => rows.slice(0, i+1)); 129 | } 130 | }, [rows, states, nextId, isEmpty]); 131 | 132 | const onRemove = useCallback((id) => { 133 | for (let i = 0; i < rows.length; ++i) { 134 | if (rows[i].key === id) { 135 | setRows(rows.slice(0, i).concat(rows.slice(i+1))); 136 | return; 137 | } 138 | } 139 | }, [rows]); 140 | 141 | const onChange = useCallback((id, obj) => { 142 | setStates({...states, [id]: obj}); 143 | }, [states]); 144 | 145 | onChangeRef.current = onChange; 146 | onRemoveRef.current = onRemove; 147 | 148 | const onSubmit = useCallback(() => { 149 | let symbols = {}; 150 | for (let row of rows) { 151 | let state = states[row.key]; 152 | if (state && state.valid && state.address && state.symbol) { 153 | symbols[state.symbol] = state.address; 154 | } 155 | } 156 | onSearch({"symbols": symbols}); 157 | }, [rows, states, onSearch]); 158 | 159 | const isValid = useCallback(() => { 160 | let cnt = 0; 161 | for (let row of rows) { 162 | let state = states[row.key]; 163 | if (!state) 164 | continue; 165 | if (!state.valid) 166 | return false; 167 | if (state.address && state.symbol) 168 | cnt++; 169 | } 170 | return cnt > 0; 171 | }, [rows, states]); 172 | 173 | return ( 174 |
175 | {rows} 176 | 177 |
178 | 186 |
187 |
188 | ); 189 | } 190 | 191 | function Result({ id, buildid, md5, symbols, download_url, symbols_url }) { 192 | const classes = useStyles(); 193 | const [open, setOpen] = useState(false); 194 | 195 | const onToggle = useCallback((evt) => { 196 | evt.preventDefault(); 197 | setOpen(!open); 198 | }, [open]); 199 | 200 | let symbolRows = Object.entries(symbols).map(([k, v]) => ( 201 | 202 | {k} 203 | {v} 204 | 205 | )); 206 | 207 | return ( 208 |
209 | {id} 210 | {open && ( 211 | 212 | 213 | 214 | Download 215 | 216 | Click to download 217 | 218 | 219 | 220 | All Symbols 221 | 222 | Click to download 223 | 224 | 225 | 226 | BuildID 227 | {buildid} 228 | 229 | 230 | MD5 231 | {md5} 232 | 233 | {symbolRows} 234 | 235 |
236 | )} 237 |
238 | ); 239 | } 240 | 241 | function App() { 242 | const [loading, setLoading] = useState(false); 243 | const [results, setResults] = useState(null); 244 | 245 | const onSearch = (data) => { 246 | setLoading(true); 247 | (async () => { 248 | try { 249 | setResults(await api('/find', data)); 250 | } finally { 251 | setLoading(false); 252 | } 253 | })(); 254 | }; 255 | 256 | return ( 257 |
258 | Powered by the libc-database search API
259 |
260 |
261 |
262 | Search
263 |
264 |
265 |
266 | Results
267 | {loading && }
268 | {results !== null && results.map(x => )}
269 |
270 |
271 |
272 | ); 273 | } 274 | 275 | export default App; 276 | -------------------------------------------------------------------------------- /searchengine/frontend/src/App.test.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { render } from '@testing-library/react'; 3 | import App from './App'; 4 | 5 | test('renders learn react link', () => { 6 | const { getByText } = render(); 7 | const linkElement = getByText(/learn react/i); 8 | expect(linkElement).toBeInTheDocument(); 9 | }); 10 | -------------------------------------------------------------------------------- /searchengine/frontend/src/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 4 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', 5 | sans-serif; 6 | -webkit-font-smoothing: antialiased; 7 | -moz-osx-font-smoothing: grayscale; 8 | } 9 | 10 | code { 11 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', 12 | monospace; 13 | } 14 | -------------------------------------------------------------------------------- /searchengine/frontend/src/index.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import './index.css'; 4 | import App from './App'; 5 | import * as serviceWorker from './serviceWorker'; 6 | 7 | ReactDOM.render( 8 | 9 | 10 | , 11 | document.getElementById('root') 12 | ); 13 | 14 | // If you want your app to work offline and load faster, you can change 15 | // unregister() to register() below. Note this comes with some pitfalls. 16 | // Learn more about service workers: https://bit.ly/CRA-PWA 17 | serviceWorker.unregister(); 18 | -------------------------------------------------------------------------------- /searchengine/frontend/src/serviceWorker.js: -------------------------------------------------------------------------------- 1 | // This optional code is used to register a service worker. 2 | // register() is not called by default. 3 | 4 | // This lets the app load faster on subsequent visits in production, and gives 5 | // it offline capabilities. However, it also means that developers (and users) 6 | // will only see deployed updates on subsequent visits to a page, after all the 7 | // existing tabs open on the page have been closed, since previously cached 8 | // resources are updated in the background. 9 | 10 | // To learn more about the benefits of this model and instructions on how to 11 | // opt-in, read https://bit.ly/CRA-PWA 12 | 13 | const isLocalhost = Boolean( 14 | window.location.hostname === 'localhost' || 15 | // [::1] is the IPv6 localhost address. 16 | window.location.hostname === '[::1]' || 17 | // 127.0.0.0/8 are considered localhost for IPv4. 18 | window.location.hostname.match( 19 | /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ 20 | ) 21 | ); 22 | 23 | export function register(config) { 24 | if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) { 25 | // The URL constructor is available in all browsers that support SW. 26 | const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); 27 | if (publicUrl.origin !== window.location.origin) { 28 | // Our service worker won't work if PUBLIC_URL is on a different origin 29 | // from what our page is served on. 
This might happen if a CDN is used to 30 | // serve assets; see https://github.com/facebook/create-react-app/issues/2374 31 | return; 32 | } 33 | 34 | window.addEventListener('load', () => { 35 | const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; 36 | 37 | if (isLocalhost) { 38 | // This is running on localhost. Let's check if a service worker still exists or not. 39 | checkValidServiceWorker(swUrl, config); 40 | 41 | // Add some additional logging to localhost, pointing developers to the 42 | // service worker/PWA documentation. 43 | navigator.serviceWorker.ready.then(() => { 44 | console.log( 45 | 'This web app is being served cache-first by a service ' + 46 | 'worker. To learn more, visit https://bit.ly/CRA-PWA' 47 | ); 48 | }); 49 | } else { 50 | // Is not localhost. Just register service worker 51 | registerValidSW(swUrl, config); 52 | } 53 | }); 54 | } 55 | } 56 | 57 | function registerValidSW(swUrl, config) { 58 | navigator.serviceWorker 59 | .register(swUrl) 60 | .then(registration => { 61 | registration.onupdatefound = () => { 62 | const installingWorker = registration.installing; 63 | if (installingWorker == null) { 64 | return; 65 | } 66 | installingWorker.onstatechange = () => { 67 | if (installingWorker.state === 'installed') { 68 | if (navigator.serviceWorker.controller) { 69 | // At this point, the updated precached content has been fetched, 70 | // but the previous service worker will still serve the older 71 | // content until all client tabs are closed. 72 | console.log( 73 | 'New content is available and will be used when all ' + 74 | 'tabs for this page are closed. See https://bit.ly/CRA-PWA.' 75 | ); 76 | 77 | // Execute callback 78 | if (config && config.onUpdate) { 79 | config.onUpdate(registration); 80 | } 81 | } else { 82 | // At this point, everything has been precached. 83 | // It's the perfect time to display a 84 | // "Content is cached for offline use." message. 85 | console.log('Content is cached for offline use.'); 86 | 87 | // Execute callback 88 | if (config && config.onSuccess) { 89 | config.onSuccess(registration); 90 | } 91 | } 92 | } 93 | }; 94 | }; 95 | }) 96 | .catch(error => { 97 | console.error('Error during service worker registration:', error); 98 | }); 99 | } 100 | 101 | function checkValidServiceWorker(swUrl, config) { 102 | // Check if the service worker can be found. If it can't reload the page. 103 | fetch(swUrl, { 104 | headers: { 'Service-Worker': 'script' }, 105 | }) 106 | .then(response => { 107 | // Ensure service worker exists, and that we really are getting a JS file. 108 | const contentType = response.headers.get('content-type'); 109 | if ( 110 | response.status === 404 || 111 | (contentType != null && contentType.indexOf('javascript') === -1) 112 | ) { 113 | // No service worker found. Probably a different app. Reload the page. 114 | navigator.serviceWorker.ready.then(registration => { 115 | registration.unregister().then(() => { 116 | window.location.reload(); 117 | }); 118 | }); 119 | } else { 120 | // Service worker found. Proceed as normal. 121 | registerValidSW(swUrl, config); 122 | } 123 | }) 124 | .catch(() => { 125 | console.log( 126 | 'No internet connection found. App is running in offline mode.' 
127 | ); 128 | }); 129 | } 130 | 131 | export function unregister() { 132 | if ('serviceWorker' in navigator) { 133 | navigator.serviceWorker.ready 134 | .then(registration => { 135 | registration.unregister(); 136 | }) 137 | .catch(error => { 138 | console.error(error.message); 139 | }); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /searchengine/frontend/src/setupTests.js: -------------------------------------------------------------------------------- 1 | // jest-dom adds custom jest matchers for asserting on DOM nodes. 2 | // allows you to do things like: 3 | // expect(element).toHaveTextContent(/react/i) 4 | // learn more: https://github.com/testing-library/jest-dom 5 | import '@testing-library/jest-dom/extend-expect'; 6 | -------------------------------------------------------------------------------- /searchengine/index.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import elasticsearch 3 | import glob 4 | import hashlib 5 | import logging 6 | import os 7 | import re 8 | import sys 9 | from datetime import datetime 10 | from elasticsearch import Elasticsearch 11 | from subprocess import check_output, DEVNULL 12 | 13 | import config 14 | 15 | 16 | log = logging.getLogger('indexer') 17 | 18 | 19 | BUILD_ID_REGEX = re.compile('Build ID: ([a-fA-F0-9]+)') 20 | 21 | def get_build_id(fname): 22 | try: 23 | res = check_output(['readelf', '-n', fname], stderr=DEVNULL).decode() 24 | except Exception: 25 | return None 26 | m = BUILD_ID_REGEX.search(res) 27 | if m: 28 | return m.group(1) 29 | 30 | 31 | if __name__ == '__main__': 32 | logging.basicConfig(level=logging.INFO) 33 | 34 | p = argparse.ArgumentParser() 35 | p.add_argument('--index', default=config.ES_INDEX_NAME) 36 | p.add_argument('--fresh', action='store_true') 37 | p.add_argument('dir') 38 | args = p.parse_args() 39 | 40 | es = Elasticsearch(hosts=["http://localhost:9200"]) 41 | 42 | if args.fresh: 43 | es.indices.delete(index=args.index, ignore=[404]) 44 | 45 | es.indices.create(index=args.index, ignore=[400]) 46 | es.indices.put_mapping(index=args.index, doc_type='libc', body={ 47 | 'libc': { 48 | "properties": { 49 | "symbols": { 50 | "type": "text", 51 | "analyzer": "whitespace" 52 | }, 53 | "sha1": { 54 | "type": "keyword", 55 | }, 56 | "md5": { 57 | "type": "keyword", 58 | }, 59 | "sha256": { 60 | "type": "keyword", 61 | }, 62 | "buildid": { 63 | "type": "keyword", 64 | }, 65 | "id": { 66 | "type": "keyword", 67 | }, 68 | } 69 | } 70 | }, include_type_name=True) 71 | 72 | dir = os.path.abspath(args.dir) 73 | for libc_fname in glob.glob(f'{dir}/*.so'): 74 | id, _ = os.path.splitext(os.path.basename(libc_fname)) 75 | log.info(f'{id}: indexing') 76 | res = es.get(index=args.index, id=id, ignore=[404]) 77 | if res['found']: 78 | log.info(f'{id}: already exists') 79 | continue 80 | doc = { 81 | 'id': id, 82 | 'timestamp': datetime.now(), 83 | } 84 | syms = [] 85 | with open(f'{dir}/{id}.symbols') as f: 86 | for line in f: 87 | if not line.strip(): 88 | continue 89 | name, addr = line.split() 90 | addr = int(addr, 16) 91 | syms.append(f'{name}@{addr & 0xfff:03x}') 92 | doc['symbols'] = ' '.join(syms) 93 | with open(libc_fname, 'rb') as f: 94 | libc = f.read() 95 | doc['sha1'] = hashlib.sha1(libc).hexdigest() 96 | doc['sha256'] = hashlib.sha256(libc).hexdigest() 97 | doc['md5'] = hashlib.md5(libc).hexdigest() 98 | buildid = get_build_id(libc_fname) 99 | if buildid: 100 | doc['buildid'] = buildid 101 | 
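        # By this point `doc` carries the libc's id, timestamp, hashes, and a
        # whitespace-joined string of name@page-offset tokens such as
        # 'faccessat@190 ...' (only the low 12 bits of each address survive,
        # via `addr & 0xfff` above). The whitespace analyzer declared in the
        # mapping splits that string into exact terms, which is what the
        # commented-out term query further down relies on.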
        es.create(index=args.index, id=id, document=doc)
102 |
103 |     es.indices.refresh(index=args.index)
104 |
105 |     # res = es.search(index=args.index, body={"query": {"match": {"sha1": "102be3798e5d42044fb6b8f072ef609ef33ee5bf"}}})
106 |     # res = es.search(index=args.index, body={"query": {"match": {"buildid": "28a5cf977adc27c69ca78bedd595096dd1977a7d"}}})
107 |     # res = es.search(index=args.index, body={"query": {"term": {"symbols": "faccessat@190"}}})
108 |     # print("Got %d Hits:" % res['hits']['total']['value'])
109 |     # for hit in res['hits']['hits']:
110 |     #     s = hit['_source']
111 |     #     print(f"Found {s['id']}")
112 |
--------------------------------------------------------------------------------
/searchengine/nginx.conf:
--------------------------------------------------------------------------------
1 | user www-data;
2 | worker_processes auto;
3 | pid /run/nginx.pid;
4 | include /etc/nginx/modules-enabled/*.conf;
5 |
6 | events {
7 |     worker_connections 768;
8 | }
9 |
10 | http {
11 |     sendfile on;
12 |     tcp_nopush on;
13 |     tcp_nodelay on;
14 |     keepalive_timeout 65;
15 |     types_hash_max_size 2048;
16 |
17 |     include /etc/nginx/mime.types;
18 |     default_type application/octet-stream;
19 |
20 |     ssl_protocols TLSv1.2 TLSv1.3; # TLSv1/1.1 deprecated (RFC 8996); SSLv3 dropped, ref: POODLE
21 |     ssl_prefer_server_ciphers on;
22 |
23 |     access_log /var/log/nginx/access.log;
24 |     error_log /var/log/nginx/error.log;
25 |
26 |     gzip on;
27 |
28 |     server {
29 |         listen 443 ssl; # "ssl on;" is deprecated; enable TLS on the listen socket instead
30 |         server_name libc.rip;
31 |
32 |         ssl_certificate /etc/letsencrypt/live/libc.rip/fullchain.pem;
33 |         ssl_certificate_key /etc/letsencrypt/live/libc.rip/privkey.pem;
34 |
35 |         location /api/ {
36 |             proxy_pass http://127.0.0.1:8000;
37 |             proxy_http_version 1.1;
38 |             proxy_set_header Upgrade $http_upgrade;
39 |             proxy_set_header Connection keep-alive;
40 |             proxy_set_header Host $host;
41 |             proxy_cache_bypass $http_upgrade;
42 |         }
43 |
44 |         location /download/ {
45 |             alias /home/niklas/libc-database/db/;
46 |         }
47 |
48 |         location / {
49 |             alias /home/niklas/libc-database/searchengine/frontend/build/;
50 |         }
51 |     }
52 | }
53 |
--------------------------------------------------------------------------------
/searchengine/requirements.txt:
--------------------------------------------------------------------------------
1 | elasticsearch>=7.16,<8
2 | connexion
3 | uwsgi
4 | flask-cors
5 |
--------------------------------------------------------------------------------
/searchengine/update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")"
3 | cd ..
4 | ./get all
5 | cd searchengine
6 | source "$HOME/.local/bin/virtualenvwrapper.sh"
7 | workon libcsearch
8 | python -m index ../db
9 |
--------------------------------------------------------------------------------
/searchengine/uwsgi.ini:
--------------------------------------------------------------------------------
1 | [uwsgi]
2 | module=app:app
3 | master=true
4 | workers=4
5 | vacuum=true
6 | http-socket=0.0.0.0:8000
7 | die-on-term=true
8 | buffer-size=262144
9 | harakiri=30
10 | enable-threads=true
11 |
--------------------------------------------------------------------------------
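The pieces above connect as follows: the React form posts a `{"symbols": {...}}`
payload to the `/find` endpoint, nginx forwards everything under `/api/` to the
uwsgi-hosted app on port 8000, and each hit carries the fields the `Result`
component renders (`id`, `buildid`, `md5`, `symbols`, `download_url`,
`symbols_url`). A minimal Python sketch of the same round trip follows; the
exact URL and response shape are assumptions inferred from `App.js` and
`nginx.conf`, so treat `searchengine/api.yml` as the authoritative contract.

```python
import requests

# Assumed public endpoint: nginx's "location /api/" proxy plus the /find route.
API_FIND = 'https://libc.rip/api/find'

def find_libcs(symbols, timeout=10):
    """Query the search API. `symbols` maps symbol names to leaked
    addresses as hex strings; only their low 12 bits are significant,
    mirroring ./find and the name@xxx tokens built by index.py."""
    resp = requests.post(API_FIND, json={'symbols': symbols}, timeout=timeout)
    resp.raise_for_status()
    return resp.json()  # assumed: a JSON array, one object per matching libc

if __name__ == '__main__':
    # Illustrative leak: any libc whose puts lands at page offset 0xf30.
    for libc in find_libcs({'puts': '0xf30'}):
        print(libc['id'], libc.get('buildid'), libc.get('download_url'))
```

Only the page offset of each leaked address matters because ASLR slides
mappings by whole pages, leaving the low 12 bits load-invariant; that is what
makes the `name@xxx` term scheme searchable at all.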