├── .coveragerc ├── .gitignore ├── .gitmodules ├── .travis.yml ├── AUTHORS ├── CHANGELOG.md ├── LICENSE ├── README.md ├── builder ├── __init__.py └── builder.py ├── conf └── orthrus.conf ├── docs ├── Getting_started.md ├── Workflow.md └── Workflow_abtests.md ├── fuzzer └── __init__.py ├── gdb-orthrus └── gdb_orthrus.py ├── install_deps.sh ├── job ├── __init__.py └── job.py ├── orthrus ├── __init__.py └── commands.py ├── orthrusutils ├── __init__.py └── orthrusutils.py ├── runtime ├── GdbExtractor.py ├── SanitizerReport.py ├── __init__.py └── runtime.py ├── setup.py ├── spectrum ├── __init__.py └── afl_sancov.py ├── tests ├── __init__.py ├── test_early_exit.py ├── test_gdb_orthrus.py ├── test_orthrus_add.py ├── test_orthrus_coverage.py ├── test_orthrus_create.py ├── test_orthrus_destroy.py ├── test_orthrus_remove.py ├── test_orthrus_runtime.py ├── test_orthrus_show.py ├── test_orthrus_spectrum.py ├── test_orthrus_start.py ├── test_orthrus_triage.py └── test_validation.py ├── tool └── orthrus └── triagetool └── __init__.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = orthrus 3 | builder 4 | tool 5 | orthrusutils 6 | tests 7 | spectrum 8 | runtime 9 | 10 | [report] 11 | show_missing = True 12 | skip_covered = True 13 | exclude_lines = 14 | if __name__ == .__main__.: 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "testdata/Automake-Autoconf-Template-Project"] 2 | path = testdata/Automake-Autoconf-Template-Project 3 | url = https://github.com/test-pipeline/Automake-Autoconf-Template-Project.git 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Ubuntu 14.04 Trusty support 2 | sudo: required 3 | dist: trusty 4 | 5 | addons: 6 | apt: 7 | sources: 8 | - sourceline: 'deb http://apt.llvm.org/trusty/ llvm-toolchain-trusty main' 9 | key_url: 'http://apt.llvm.org/llvm-snapshot.gpg.key' 10 | packages: 11 | - clang-3.8 12 | - libclang-common-3.8-dev 13 | - llvm-3.8-runtime 14 | - llvm-3.8 15 | - lcov 16 | - coreutils 17 | - automake 18 | - python3-setuptools 19 | - gdb 20 | - ninja-build 21 | 22 | language: python 23 | python: 24 | - 2.7 25 | 26 | before_install: 27 | - pip install coveralls 28 | - pip install pytest pytest-cov 29 | - sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-3.8 50 30 | - sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-3.8 50 31 | 32 | install: ./install_deps.sh 33 | 34 | script: cd testdata/Automake-Autoconf-Template-Project && export PATH=`echo $PATH | sed 's/\/usr\/local\/clang-3.5.0\/bin://g'`:$HOME/local/bin && PYTHONUNBUFFERED=1 coverage run --rcfile=../../.coveragerc ../../setup.py test 35 | after_success: 36 | - coveralls 37 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Bhargava Shastry 2 | bshastry@sec.t-labs.tu-berlin.de 3 | bshas3@gmail.com 4 | 5 | Markus Leutner 6 | mleutner@gmail.com 7 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ### v1.2 2 | 3 | - Upgraded to afl-cov v0.6 (and 1.32a afl-utils for a/b tests) 4 | - Added a/b tests feature to orthrus subcommands 5 | - This systematizes a/b experiments 6 | - Added spectrum and runtime features 7 | - Spectrum merges the feature set of afl-sancov, albeit only for routine jobs 8 | - Runtime introduces a new feature that allows ASAN crash reports to be jsonified 9 | - Major refactoring and additional test cases 10 | - Bug fixes 11 | - Compact sync dir leads to incorrect resumes 12 | - Start 
fuzzer not using all available cores optimally 13 | - Use afl-multikill instead of a hacky `pkill -9` to terminate multicore fuzzing sessions 14 | - This fixes a bug related to shared memory segments (which afl-fuzz requests via shmget()) that were not getting freed 15 | 16 | ### v1.1 17 | 18 | - Upgraded to afl-utils 1.31a (JSON config instead of ini config) 19 | 20 | ### v1.0 21 | 22 | - First release 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Orthrus [![Build Status](https://travis-ci.org/test-pipeline/orthrus.svg?branch=master)](https://travis-ci.org/test-pipeline/orthrus) [![License](http://img.shields.io/:license-gpl3-blue.svg?style=flat-square)](http://www.gnu.org/licenses/gpl-3.0.html) [![Coverage Status](https://coveralls.io/repos/github/test-pipeline/orthrus/badge.svg?branch=master)](https://coveralls.io/github/test-pipeline/orthrus?branch=master) 2 | 3 | Orthrus is a tool for managing, conducting, and assessing dictionary-based security (fuzz) testing for [autotools][1] projects. At the moment, it supports Clang/LLVM instrumentation and the AFL ecosystem (afl-fuzz, afl-utils, afl-cov). The ultimate aim is for Orthrus to be a generic wrapper around state-of-the-art fuzz and instrumentation tools on the one hand, and disparate build systems on the other. 4 | 5 | **NEW**: Orthrus now supports dictionary-based fuzzing. Run `orthrus create -dict` to generate a fuzzing dictionary and `orthrus add --jobconf` to specify fuzzer options (e.g., `-x dict`) that make use of the generated dictionary. 6 | 7 | # Installation 8 | 9 | Please read [docs/Getting_started.md](docs/Getting_started.md). 10 | 11 | # Workflow 12 | 13 | Orthrus currently supports two workflows. In a routine workflow, you work with a single fuzzing job end-to-end, i.e., from source code instrumentation through crash triage. In an A/B test workflow, you work with a single A/B test end-to-end. 14 | 15 | ## Routine 16 | 17 | Please read [docs/Workflow.md](docs/Workflow.md). 18 | 19 | ## A/B testing 20 | 21 | Please read [docs/Workflow_abtests.md](docs/Workflow_abtests.md). 22 | 23 | # Full usage 24 | ``` 25 | $ orthrus -h 26 | usage: Orthrus 1.2 by Bhargava Shastry, and Markus Leutner 27 | [-h] [-v] 28 | {create,add,remove,start,stop,show,triage,coverage,spectrum,runtime,destroy,validate} 29 | ... 30 | 31 | optional arguments: 32 | -h, --help show this help message and exit 33 | -v, --verbose Verbose mode, print information about the progress 34 | 35 | subcommands: 36 | Orthrus subcommands 37 | 38 | {create,add,remove,start,stop,show,triage,coverage,spectrum,runtime,destroy,validate} 39 | create Create an orthrus workspace 40 | add Add a fuzzing job 41 | remove Remove a fuzzing job 42 | start Start a fuzzing job 43 | stop Stop a fuzzing job 44 | show Show what's currently going on 45 | triage Triage crash corpus 46 | coverage Run afl-cov on existing AFL corpus 47 | spectrum Run spectrum based analysis on existing AFL corpus 48 | runtime Perform dynamic analysis of existing AFL corpus 49 | destroy Destroy an orthrus workspace 50 | validate Check if all Orthrus dependencies are met 51 | ``` 52 | 53 | # Issues and PRs 54 | 55 | - Feel free to file an issue if something doesn't work as expected :-) 56 | - Attaching logs from `.orthrus/logs` would be helpful (a snippet for bundling them follows this list) 57 | - PRs for interesting workflows are much appreciated!
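A minimal sketch for bundling those logs before attaching them to an issue; it assumes you run it from the root of your autotools project, i.e., the directory containing `.orthrus`:

```bash
# Archive the Orthrus logs so they can be attached to a bug report.
tar czf orthrus-logs.tar.gz .orthrus/logs
```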
58 | 59 | # Credits 60 | 61 | Orthrus was possible due to excellent work by 62 | 63 | - lcamtuf (afl-fuzz) 64 | - rc0r (afl-utils) 65 | - Michael Rash (afl-cov) 66 | - Clang/LLVM sanitization projects 67 | - Folks at afl users community and beyond 68 | 69 | [1]: https://en.wikipedia.org/wiki/GNU_Build_System 70 | 71 | -------------------------------------------------------------------------------- /builder/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/test-pipeline/orthrus/7e916f36ceffcc4fdd9013a4d952649f69738aa8/builder/__init__.py -------------------------------------------------------------------------------- /builder/builder.py: -------------------------------------------------------------------------------- 1 | import os 2 | from collections import namedtuple 3 | import orthrusutils.orthrusutils as util 4 | 5 | class BuildEnv(object): 6 | 7 | cwd = os.getcwd() 8 | blacklist_file = '{}/asan_blacklist.txt'.format(cwd) 9 | 10 | BEnv = namedtuple('BEnv', ['cc', 'cxx', 'cflags', 'cxxflags', 'ldflags', 11 | 'ldxxflags', 'misc']) 12 | 13 | BEnv_afl_asan = BEnv('afl-clang', 'afl-clang++', '-O3', '-O3', '', '', 14 | {'AFL_USE_ASAN': '1', 'AFL_DONT_OPTIMIZE': '1'}) 15 | 16 | BEnv_afl_asan_blacklist = BEnv('afl-clang', 'afl-clang++', '-O3 -fsanitize-blacklist={}'.format(blacklist_file), 17 | '-O3 -fsanitize-blacklist={}'.format(blacklist_file), 18 | '-fsanitize-blacklist={}'.format(blacklist_file), 19 | '-fsanitize-blacklist={}'.format(blacklist_file), 20 | {'AFL_USE_ASAN': '1', 'AFL_DONT_OPTIMIZE': '1'}) 21 | 22 | BEnv_afl_harden = BEnv('afl-clang', 'afl-clang++', '-O2', '-O2', '', '', 23 | {'AFL_HARDEN': '1', 'AFL_DONT_OPTIMIZE': '1'}) 24 | 25 | BEnv_afl_harden_softfail = BEnv('afl-clang', 'afl-clang++', '-O2', '-O2', '', '', 26 | {'AFL_DONT_OPTIMIZE': '1'}) 27 | 28 | BEnv_asan_debug = BEnv('clang', 'clang++', '-g -O0 -fsanitize=address -fno-omit-frame-pointer', 29 | '-g -O0 -fsanitize=address -fno-omit-frame-pointer', 30 | '-fsanitize=address', '-fsanitize=address', {}) 31 | 32 | BEnv_asan_debug_blacklist = BEnv('clang', 'clang++', 33 | '-g -O0 -fsanitize=address -fno-omit-frame-pointer -fsanitize-blacklist={}'.format(blacklist_file), 34 | '-g -O0 -fsanitize=address -fno-omit-frame-pointer -fsanitize-blacklist={}'.format(blacklist_file), 35 | '-fsanitize=address -fsanitize-blacklist={}'.format(blacklist_file), 36 | '-fsanitize=address -fsanitize-blacklist={}'.format(blacklist_file), {}) 37 | 38 | BEnv_harden_debug = BEnv('clang', 'clang++', '-g -O0 -fstack-protector-all -D_FORTIFY_SOURCE=2 ' \ 39 | '-fno-omit-frame-pointer', '-g -O0 -fstack-protector-all ' \ 40 | '-D_FORTIFY_SOURCE=2 -fno-omit-frame-pointer', '', '', {}) 41 | 42 | BEnv_harden_debug_softfail = BEnv('clang', 'clang++', '-g -O0 -fstack-protector-all ' \ 43 | '-fno-omit-frame-pointer', '-g -O0 -fstack-protector-all ' \ 44 | '-fno-omit-frame-pointer', '', '', {}) 45 | 46 | BEnv_gcc_coverage = BEnv('gcc', 'g++', '-g -O0 -fprofile-arcs -ftest-coverage', 47 | '-g -O0 -fprofile-arcs -ftest-coverage', '-lgcov', '-lgcov', {}) 48 | 49 | BEnv_asan_coverage = BEnv('clang', 'clang++', 50 | '-g -O0 -fsanitize=address -fno-omit-frame-pointer -fsanitize-coverage=bb', 51 | '-g -O0 -fsanitize=address -fno-omit-frame-pointer -fsanitize-coverage=bb', 52 | '-fsanitize=address', '-fsanitize=address', {}) 53 | BEnv_ubsan_coverage = BEnv('clang', 'clang++', 54 | '-g -O0 -fsanitize=undefined -fsanitize-coverage=bb', 55 | '-g -O0 -fsanitize=undefined 
-fsanitize-coverage=bb', 56 | '-fsanitize=undefined', '-fsanitize=undefined', {}) 57 | BEnv_bear = BEnv('clang', 'clang++', '', '', '', '', {}) 58 | 59 | 60 | def __init__(self, buildenv): 61 | 62 | self.buildenv = os.environ.copy() 63 | self.exportvars = {} 64 | self.exportvars['CC'] = buildenv.cc 65 | self.exportvars['CXX'] = buildenv.cxx 66 | self.exportvars['CFLAGS'] = buildenv.cflags 67 | self.exportvars['CXXFLAGS'] = buildenv.cxxflags 68 | self.exportvars['LDFLAGS'] = buildenv.ldflags 69 | self.exportvars['LDXXFLAGS'] = buildenv.ldxxflags 70 | self.exportvars.update(buildenv.misc) 71 | 72 | def getenvdict(self): 73 | self.buildenv.update(self.exportvars) 74 | return self.buildenv 75 | 76 | class Builder(object): 77 | 78 | def __init__(self, buildenv, configargs, logfile=None): 79 | self.env = buildenv.getenvdict() 80 | self.configargs = configargs 81 | self.logfile = logfile 82 | 83 | def configure(self): 84 | 85 | if not os.path.isfile("configure"): 86 | return False 87 | 88 | # AFL-fuzz likes statically linked binaries 89 | # "--disable-shared " + 90 | command = ["./configure " + " ".join(self.configargs)] 91 | 92 | if not util.run_cmd(command, self.env, self.logfile): 93 | return False 94 | return True 95 | 96 | def make_install(self): 97 | if not os.path.isfile("Makefile"): 98 | return False 99 | 100 | command = ["make clean && make -j install"] 101 | if not util.run_cmd(command, self.env, self.logfile): 102 | return False 103 | return True 104 | 105 | def bear_make(self): 106 | if not os.path.isfile("Makefile"): 107 | return False 108 | 109 | command = ["make clean && bear make -j"] 110 | if not util.run_cmd(command, self.env, self.logfile): 111 | return False 112 | return True 113 | 114 | def clang_sdict(self): 115 | if not self.bear_make(): 116 | return False 117 | command = ["find . -type f \( -name \"*.c\" -o -name \"*.cpp\" -o -name \"*.cc\" \) -print0 |" 118 | " xargs -0 clang-sdict -p 1>> dict.clang"] 119 | if not util.run_cmd(command, self.env, self.logfile): 120 | return False 121 | return True -------------------------------------------------------------------------------- /conf/orthrus.conf: -------------------------------------------------------------------------------- 1 | ### 2 | # This file is sourced by Orthrus for configuration. 
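# Dependencies listed in the [dependencies] section below may be toggled
# on/off. Fuzzers used in a/b tests (see docs/Workflow_abtests.md) need
# their own entry here, e.g. "afl-fuzz-fast = on".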
3 | ### 4 | 5 | ## 6 | ## Orthrus config 7 | ## 8 | [orthrus] 9 | directory = .orthrus 10 | 11 | ## 12 | ## Dependencies: Mark on (if required) or off (otherwise) 13 | ## 14 | [dependencies] 15 | 16 | # Instrumentation 17 | clang = on 18 | gcc = on 19 | 20 | # Fuzz 21 | afl-fuzz = on 22 | afl-clang = on 23 | afl-clang++ = on 24 | 25 | # Fuzz management and triage 26 | afl-collect = on 27 | afl-multicore = on 28 | afl-multikill = on 29 | afl-minimize = on 30 | gdb = on 31 | 32 | # Coverage 33 | afl-cov = on 34 | lcov = on 35 | genhtml = on 36 | sancov-3.8 = on 37 | llvm-symbolizer-3.8 = on 38 | pysancov = on 39 | 40 | # Dict generation 41 | bear = on 42 | clang-sdict = on 43 | -------------------------------------------------------------------------------- /docs/Getting_started.md: -------------------------------------------------------------------------------- 1 | # Pre-requisites 2 | 3 | - python 4 | - 2.7 for orthrus 5 | - 3.X for afl-utils 6 | - [afl-fuzz][1] 7 | - [afl-utils][2] 8 | - virtualenv and virtualenvwrapper (optional but highly recommended) 9 | 10 | ```bash 11 | $ pip install virtualenv && pip install virtualenvwrapper 12 | ``` 13 | 14 | - Clang/LLVM toolchain (Tested with v3.8) 15 | ```bash 16 | $ sudo apt-get install lcov 17 | $ curl -sSL "http://apt.llvm.org/llvm-snapshot.gpg.key" | sudo -E apt-key add - 18 | $ echo "deb http://apt.llvm.org/trusty/ llvm-toolchain-trusty-3.8 main" | sudo tee -a /etc/apt/sources.list > /dev/null 19 | $ sudo apt-get update 20 | $ sudo apt-get --no-install-suggests --no-install-recommends --force-yes install clang-3.8 libclang-common-3.8-dev llvm-3.8-runtime llvm-3.8 21 | ``` 22 | - lcov 23 | ```bash 24 | $ sudo apt-get install lcov 25 | ``` 26 | - pysancov 27 | ```bash 28 | $ wget -q https://raw.githubusercontent.com/llvm-mirror/compiler-rt/release_38/lib/sanitizer_common/scripts/sancov.py &> /dev/null 29 | $ chmod +x sancov.py &> /dev/null 30 | $ sudo mv sancov.py /usr/local/bin/pysancov &> /dev/null 31 | ``` 32 | - bear (v2.1.5) for clang tooling compile commands (**xenial users**: `sudo apt-get install bear`; other users, do as follows) 33 | ```bash 34 | $ wget https://launchpadlibrarian.net/240291131/bear_2.1.5.orig.tar.gz 35 | $ tar xzf bear_2.1.5.orig.tar.gz && rm bear_2.1.5.orig.tar.gz 36 | $ mkdir Bear-2.1.5.build && cd Bear-2.1.5.build && cmake ../Bear-2.1.5 && make -j all && make install 37 | $ cd .. && rm -rf Bear-2.1.5 Bear-2.1.5.build 38 | ``` 39 | - clang-sdict for input dictionary generation 40 | ```bash 41 | $ export CLANG_SDICT="https://www.dropbox.com/s/lqayfheheo3coag/fork-6173707-6216-gb24cc33-153448-2017-04-10.tar.gz?dl=0" 42 | $ curl -o clang.tar.gz -L "$CLANG_SDICT" 43 | $ tar xzf clang.tar.gz -C $HOME 44 | $ export PATH=$PATH:$HOME/local/bin 45 | ``` 46 | 47 | - An autotools open-source project for fuzzing 48 | 49 | # Python package dependencies 50 | 51 | You can install Orthrus' python dependencies (afl-utils and afl-cov) via virtualenvwrapper or natively. The former is recommended.
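Whichever route you pick, a quick sanity check helps before moving on. The sketch below assumes the tools were installed onto your `PATH` (or that the `afl` virtualenv described next is active):

```bash
# Check that the AFL helper tools Orthrus expects are resolvable.
for tool in afl-fuzz afl-collect afl-multicore afl-cov; do
    command -v "$tool" >/dev/null && echo "[+] $tool found" || echo "[-] $tool missing"
done
```

`orthrus validate` (see [docs/Workflow.md](Workflow.md)) performs the full version of this check against the dependencies marked in `~/.orthrus/orthrus.conf`.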
52 | 53 | ## Using Virtualenvwrapper 54 | 55 | All steps below assume that you are creating and working with the `afl` virtualenv (`workon afl`). 56 | 57 | - afl-utils v1.32a 58 | 59 | ```bash 60 | $ mkvirtualenv -p /usr/bin/python3.4 afl 61 | $ wget -q https://github.com/rc0r/afl-utils/archive/v1.32a.tar.gz && tar xzf v1.32a.tar.gz 62 | $ rm v1.32a.tar.gz && cd afl-utils-1.32a 63 | $ python setup.py install 64 | ``` 65 | 66 | - afl-cov (v0.6) 67 | 68 | ```bash 69 | $ wget -q https://github.com/mrash/afl-cov/archive/0.6.tar.gz && tar xzf 0.6.tar.gz 70 | $ rm 0.6.tar.gz && cd afl-cov-0.6 71 | $ cp afl-cov ~/.virtualenvs/afl/bin/ 72 | ``` 73 | 74 | 75 | ## Native installation 76 | 77 | - afl-utils v1.32a 78 | ```bash 79 | $ cd $HOME 80 | $ wget -q https://github.com/rc0r/afl-utils/archive/v1.32a.tar.gz && tar xzf v1.32a.tar.gz 81 | $ rm v1.32a.tar.gz && cd afl-utils-1.32a 82 | $ sudo mkdir -p /usr/lib/python3.4/site-packages && sudo python3 setup.py install 83 | $ cd ../ 84 | $ echo "source /usr/lib/python3.4/site-packages/exploitable-1.32_rcor-py3.4.egg/exploitable/exploitable.py" >> ~/.gdbinit 85 | $ sudo rm -rf afl-utils-1.32a 86 | ``` 87 | 88 | - afl-cov (v0.6) 89 | ```bash 90 | $ wget -q https://github.com/mrash/afl-cov/archive/0.6.tar.gz && tar xzf 0.6.tar.gz 91 | $ rm 0.6.tar.gz && cd afl-cov-0.6 92 | $ sudo cp afl-cov /usr/local/bin/ 93 | $ cd .. && rm -rf afl-cov-0.6 94 | ``` 95 | 96 | # Installation 97 | 98 | ## Using Virtualenvwrapper 99 | 100 | ```bash 101 | $ mkdir -p ~/.virtualenvs/afl/lib/python2.7/site-packages/ 102 | $ export PYTHONPATH=$HOME/.virtualenvs/afl/lib/python2.7/site-packages 103 | $ git clone https://github.com/test-pipeline/orthrus.git && cd orthrus 104 | $ python2.7 setup.py install --prefix ~/.virtualenvs/afl 105 | ``` 106 | 107 | A convenient alias would be something like this, appended to `~/.bash_aliases` 108 | ``` 109 | # Uses the right PYTHONPATH before invoking orthrus 110 | alias orthrus='PYTHONPATH=$HOME/.virtualenvs/afl/lib/python2.7/site-packages orthrus' 111 | ``` 112 | 113 | Whenever you want to fuzz an autotools codebase, you may simply summon orthrus, afl-utils, and afl-cov by doing `workon afl`. 114 | 115 | ## Native installation 116 | 117 | ``` 118 | $ (sudo) python2.7 setup.py install 119 | ``` 120 | 121 | [1]: http://lcamtuf.coredump.cx/afl/ 122 | [2]: https://github.com/rc0r/afl-utils/tree/v1.32a 123 | [3]: https://github.com/mrash/afl-cov/ 124 | -------------------------------------------------------------------------------- /docs/Workflow.md: -------------------------------------------------------------------------------- 1 | ## Step 1: Validate dependencies (One-time only) 2 | 3 | Orthrus depends on quite a few packages from Clang/LLVM and the AFL ecosystem. To make sure you don't have to wade through ugly error messages later, 4 | it makes sense to validate these dependencies up front. You do it like so 5 | ``` 6 | $ orthrus validate 7 | [+] Validating Orthrus dependencies 8 | [+] The following programs have been marked as required in ~/.orthrus/orthrus.conf 9 | [+] clang 10 | [+] gcc 11 | [+] afl-fuzz 12 | [+] afl-clang 13 | [+] afl-clang++ 14 | [+] afl-collect 15 | [+] afl-multicore 16 | [+] afl-minimize 17 | [+] gdb 18 | [+] afl-cov 19 | [+] lcov 20 | [+] genhtml 21 | [+] sancov-3.8 22 | [+] llvm-symbolizer-3.8 23 | [+] pysancov 24 | [+] Checking if requirements are met... done 25 | [+] All requirements met. Orthrus is ready for use!
26 | ``` 27 | 28 | ## Step 2: Create instrumented binaries 29 | 30 | - Creates the following binaries 31 | - ASAN+AFL instrumentation (fuzzing) 32 | - AFL+HARDEN instrumentation only (fuzzing) 33 | - ASAN Debug (triage) 34 | - HARDEN Debug (triage) 35 | - Gcov (coverage) 36 | - All binaries installed in `.orthrus/binaries` subdir relative to WD root 37 | 38 | ``` 39 | $ cd $AUTOTOOLS_PROJECT_WD 40 | $ orthrus create -fuzz -asan -cov 41 | [+] Create Orthrus workspace 42 | [+] Installing binaries for afl-fuzz with AddressSanitizer 43 | [+] Configure... done 44 | [+] Compile and install... done 45 | [+] Verifying instrumentation... done 46 | [+] Installing binaries for debug with AddressSanitizer 47 | [+] Configure... done 48 | [+] Compile and install... done 49 | [+] Verifying instrumentation... done 50 | [+] Installing binaries for afl-fuzz in harden mode 51 | [+] Configure... done 52 | [+] Compile and install... done 53 | [+] Verifying instrumentation... done 54 | [+] Installing binaries for debug in harden mode 55 | [+] Configure... done 56 | [+] Compile and install... done 57 | [+] Verifying instrumentation... done 58 | [+] Installing binaries for obtaining test coverage information 59 | [+] Configure... done 60 | [+] Compile and install... done 61 | [+] Verifying instrumentation... done 62 | ``` 63 | 64 | ## Step 3: Add/Remove fuzzing job 65 | 66 | - Sets up config for (local) multi-core job with AFL+HARDEN (master) and 67 | ASAN+AFL (slave) 68 | - Each job allocated an independent data directory 69 | - Can be operated (started, stopped, managed) independently 70 | - Fuzzer and arguments are specified in a configuration file 71 | 72 | ``` 73 | $ cat routine.conf 74 | { 75 | "fuzzer": "afl-fuzz", 76 | "fuzzer_args": "" 77 | } 78 | 79 | $ orthrus add --job="main @@" --jobtype=routine --jobconf=routine.conf 80 | [+] Adding fuzzing job to Orthrus workspace 81 | [+] Check Orthrus workspace... done 82 | [+] Adding job for [main]... done 83 | [+] Configuring job for [main]... done 84 | ``` 85 | 86 | - To remove an existing job, you need to look up the job ID first... 87 | ```bash 88 | $ ls .orthrus/jobs/ 89 | 1167520733 jobs.conf 90 | ``` 91 | - ...and pass the job ID as an argument to orthrus remove 92 | ``` 93 | $ orthrus remove -j 1167520733 94 | [+] Removing fuzzing job from Orthrus workspace 95 | [+] Check Orthrus workspace... done 96 | [+] Archiving data for job [1167520733]... done 97 | [+] Removing job for [1167520733]... done 98 | ``` 99 | 100 | - You can also import an existing AFL generated corpus tarball (contents of 101 | afl-sync-dir e.g., SESSION000, SESSION001, etc.) 102 | 103 | ``` 104 | $ orthrus add --job="main @@" -i=./afl-out.tar.gz --jobtype=routine --jobconf=routine.conf 105 | [+] Adding fuzzing job to Orthrus workspace 106 | [+] Check Orthrus workspace... done 107 | [+] Adding job for [main]... done 108 | [+] Configuring job for [main]... done 109 | [+] Import afl sync dir for job [1167520733]... done 110 | [+] Minimizing corpus for job [1167520733]... 111 | [*] Looking for fuzzing queues in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Projec 112 | t/.orthrus/jobs/1167520733/afl-out'. 113 | 114 | [*] Found 4 fuzzers, collecting samples. 115 | 116 | [*] Successfully indexed 5 samples. 117 | 118 | [*] Copying 5 samples into collection directory... 
119 | 120 | [*] Executing: afl-cmin -i /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthr 121 | us/jobs/1167520733/collect -o /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/co 122 | llect.cmin -- /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/binaries/afl-harden/bin/main @@ 123 | 124 | [*] Testing the target binary... 125 | 126 | [*] Obtaining traces for input files in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/collect'... 127 | 128 | [*] Sorting trace sets (this may take a while)... 129 | 130 | [*] Finding best candidates for each tuple... 131 | 132 | [*] Sorting candidate list (be patient)... 133 | 134 | [*] Processing candidates and writing output files... 135 | 136 | [*] Performing dry-run in /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/collect.cmin... 137 | 138 | [!] Be patient! Depending on the corpus size this step can take hours... 139 | 140 | [!] Collection directory exists and is not empty! 141 | 142 | [!] Skipping collection step... 143 | 144 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/afl-out/SESSION003/queue 145 | 146 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/afl-out/SESSION002/queue 147 | 148 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/afl-out/SESSION000/queue 149 | 150 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/afl-out/SESSION001/queue 151 | 152 | ``` 153 | 154 | - You can seed a job, like so 155 | ``` 156 | $ orthrus add --job="main @@" -s=./seeds --jobtype=routine --jobconf=routine.conf 157 | [+] Adding fuzzing job to Orthrus workspace 158 | [+] Check Orthrus workspace... done 159 | [+] Adding job for [main]... done 160 | [+] Configuring job for [main]... done 161 | [+] Adding initial samples for job [main]... done 162 | [+] Adding initial samples for job [main]... done 163 | ``` 164 | 165 | 166 | ## Step 4: Start/Stop afl fuzzers (via afl-utils) 167 | 168 | - To start fuzzing for a pre-defined job, you do 169 | ``` 170 | $ orthrus start -j 1167520733 171 | [+] Starting fuzzing jobs 172 | [+] Check Orthrus workspace... done 173 | [+] Start Fuzzers for Job [1167520733]... Checking core_pattern...okay 174 | [+] Starting AFL harden fuzzer job as master...done 175 | [*] Starting master instance... 176 | 177 | [+] Master 000 started (PID: 15969) 178 | [*] Starting slave instances... 179 | 180 | [+] Slave 001 started (PID: 15970) 181 | [+] Starting AFL ASAN fuzzer job as slave...done 182 | [*] Starting slave instances... 183 | 184 | [+] Slave 001 started (PID: 16151) 185 | [+] Slave 002 started (PID: 16155) 186 | ``` 187 | 188 | - To stop fuzzing, you do 189 | ``` 190 | $ orthrus stop -j 1167520733 191 | [+] Stopping fuzzing jobs...done 192 | ``` 193 | 194 | - To resume an earlier session, do 195 | ``` 196 | $ orthrus start -j 1167520733 -m 197 | [+] Starting fuzzing jobs 198 | [+] Check Orthrus workspace... done 199 | [+] Tidy fuzzer sync dir... done 200 | [+] Minimizing corpus for job [1167520733]... 
201 | [*] Looking for fuzzing queues in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Projec 202 | t/.orthrus/jobs/1167520733/afl-out'. 203 | 204 | [*] Found 1 fuzzers, collecting samples. 205 | 206 | [*] Successfully indexed 3 samples. 207 | 208 | [*] Copying 3 samples into collection directory... 209 | 210 | [*] Executing: afl-cmin -i /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthr 211 | us/jobs/1167520733/collect -o /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/co 212 | llect.cmin -- /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/binaries/afl-harden/bin/main @@ 213 | 214 | [*] Testing the target binary... 215 | 216 | [*] Obtaining traces for input files in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/collect'... 217 | 218 | [*] Sorting trace sets (this may take a while)... 219 | 220 | [*] Finding best candidates for each tuple... 221 | 222 | [*] Sorting candidate list (be patient)... 223 | 224 | [*] Processing candidates and writing output files... 225 | 226 | [*] Performing dry-run in /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/collect.cmin... 227 | 228 | [!] Be patient! Depending on the corpus size this step can take hours... 229 | 230 | [!] Collection directory exists and is not empty! 231 | 232 | [!] Skipping collection step... 233 | 234 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/1167520733/afl-out/SESSION000/queue 235 | 236 | [+] Start Fuzzers for Job [1167520733]... Checking core_pattern...okay 237 | [+] Starting AFL harden fuzzer job as master...done 238 | [*] Starting master instance... 239 | 240 | [+] Master 000 started (PID: 28501) 241 | [*] Starting slave instances... 242 | 243 | [+] Slave 001 started (PID: 28502) 244 | [+] Starting AFL ASAN fuzzer job as slave...done 245 | [*] Starting slave instances... 246 | 247 | [+] Slave 001 started (PID: 28809) 248 | [+] Slave 002 started (PID: 28813) 249 | ``` 250 | 251 | 252 | - The `-m` flag minimizes the existing AFL corpus, archives the existing queue 253 | dir, reseeds it with the minimized seeds, and resumes fuzzing 254 | 255 | ## Step 5: Monitor test coverage (via afl-cov) 256 | 257 | You can either: 258 | 259 | - Monitor test coverage during a live fuzzing session 260 | ``` 261 | $ orthrus start -j 1167520733 -c 262 | [+] Starting fuzzing jobs 263 | [+] Check Orthrus workspace... done 264 | [+] Start afl-cov for Job [1167520733]... done 265 | [+] Start Fuzzers for Job [1167520733]... Checking core_pattern...okay 266 | [+] Starting AFL harden fuzzer job as master...done 267 | [*] Starting master instance... 268 | 269 | [+] Master 000 started (PID: 25378) 270 | [*] Starting slave instances... 271 | 272 | [+] Slave 001 started (PID: 25379) 273 | ``` 274 | 275 | - OR check test coverage post testing (when all instances of afl-fuzz are dead) 276 | ``` 277 | $ orthrus coverage -j 1167520733 278 | [+] Checking test coverage for job [1167520733]... 
done 279 | [+] Please check .orthrus/jobs/1167520733/afl-out/cov for coverage info 280 | ``` 281 | 282 | You may force stop a live afl-cov instance along with all fuzz sessions, like so 283 | ``` 284 | $ orthrus stop -j 1167520733 -c 285 | [+] Stopping fuzzing jobs...done 286 | [+] Stopping afl-cov for jobs...done 287 | ``` 288 | 289 | ## Step 6: Triage crashes (via afl-utils/exploitable) 290 | 291 | - To triage an existing AFL corpus, do 292 | ``` 293 | $ orthrus triage -j 1167520733 294 | [+] Triaging crashes for job [1167520733] 295 | [+] Collect and verify 'harden' mode crashes... done 296 | [+] Tidying crash dir...done! 297 | [+] Collect and verify 'asan' mode crashes... done 298 | [+] Tidying crash dir...done! 299 | [+] Collect and verify 'all' mode crashes... done 300 | [+] Tidying crash dir...done! 301 | [+] Triaged 15 crashes. See .orthrus/jobs/1167520733/unique/ 302 | ``` 303 | 304 | ## Step 7: Obtain crash spectrum 305 | 306 | - Crash spectrum technically means basic block coverage of crashing executions (slice) and differential basic block coverage of crashing minus non crashing input (dice) 307 | - To obtain the spectra, do 308 | ``` 309 | $ orthrus spectrum -j 3138688894 310 | [+] Starting spectrum generation for job ID [3138688894] 311 | [+] Checking Orthrus workspace... done 312 | [+] Retrieving job ID [3138688894]... done 313 | [+] Generating crash spectrum routine job ID [3138688894] 314 | 315 | *** Imported 60 new crash files from: .orthrus/jobs/routine/3138688894/afl-out/unique 316 | 317 | [+] Processing crash file (1/60) 318 | ... 319 | ``` 320 | 321 | - Output is written to `.orthrus/jobs/routine/job_id/crash-analysis/spectrum` 322 | 323 | ## Step 8: Obtain runtime crash information 324 | 325 | - Run time information includes faulting addresses, crash backtrace etc. 326 | - At the moment, only ASAN reports are parsed, in the future raw core dump parsing may be introduced 327 | - The parsed information is JSONified in the output dir (filename corresponds to crashing input filename) 328 | ``` 329 | $ orthrus runtime -j 538551600 330 | [+] Starting dynamic analysis of all crashes for job ID [538551600] 331 | [+] Checking Orthrus workspace... done 332 | [+] Retrieving job ID [538551600]... done 333 | [+] Performing dynamic analysis of crashes for routine job ID [538551600] 334 | [+] Analyzing crash 1 of 1... done 335 | [+] JSONifying ASAN report... 
done 336 | ``` 337 | 338 | - Output is written to `.orthrus/jobs/routine/job_id/crash-analysis/runtime` 339 | 340 | ## Step 9: User interface for fuzz status and coverage 341 | 342 | - You may view configured jobs, like so 343 | ``` 344 | $ orthrus show -conf 345 | Configured jobs found: 346 | 0) [1167520733] main @@ 347 | ``` 348 | 349 | - You may view the current status of a job (via afl-whatsup) 350 | ``` 351 | $ orthrus show -j 1167520733 352 | Status of jobs: 353 | Job [1167520733] for target 'main': 354 | Fuzzers alive : 0 355 | Dead or remote : 2 (excluded from stats) 356 | Total run time : 0 days, 0 hours 357 | Total execs : 0 million 358 | Cumulative speed : 0 execs/sec 359 | Pending paths : 0 faves, 0 total 360 | Crashes found : 0 locally unique 361 | 362 | Triaged crashes : 0 available 363 | ``` 364 | 365 | - You may view the coverage report (via afl-cov) 366 | ``` 367 | $ orthrus show -cov 368 | Opening coverage html for job 1167520733 in a new browser tab 369 | ``` 370 | 371 | ## Step 10: Destroy orthrus session 372 | 373 | - This permanently deletes all orthrus data (under `.orthrus`) 374 | ``` 375 | $ orthrus destroy 376 | [+] Destroy Orthrus workspace 377 | [?] Delete complete workspace? [y/n]...: y 378 | [+] Deleting all files... done 379 | ``` 380 | -------------------------------------------------------------------------------- /docs/Workflow_abtests.md: -------------------------------------------------------------------------------- 1 | # Basics 2 | 3 | - A/B testing of fuzzers or fuzzing variations is supported at the moment 4 | - You can A/B test afl-fuzz vs afl-fuzz-fast 5 | - You can A/B test afl-fuzz vs afl-fuzz -d 6 | - You **cannot** A/B test program1 vs program2 7 | - You **cannot** A/B test program1 --arg1 vs program1 --arg2 8 | - Please bear in mind that the test program, arguments, and fuzz configuration are identical for both the control (A) and experiment (B) groups 9 | - The only thing that is going to be different is the fuzzer used and/or the arguments passed to it 10 | - It is your sole responsibility to use the A/B test interface meaningfully 11 | 12 | ## Step 1: Validate dependencies for A/B testing 13 | 14 | If you have already validated Orthrus before, you only need to validate the incremental dependencies of A/B testing. For instance, you may want to use afl-fuzz-fast for A/B tests against afl-fuzz. To validate dependencies introduced by A/B testing, simply add the dependency to the (already existing) `dependencies` section in `~/.orthrus/orthrus.conf`, like so 15 | ``` 16 | [dependencies] 17 | clang = on 18 | gcc = on 19 | ... 20 | 21 | afl-fuzz-fast = on 22 | 23 | ``` 24 | 25 | ``` 26 | $ orthrus validate 27 | [+] Validating Orthrus dependencies 28 | [+] The following programs have been marked as required in ~/.orthrus/orthrus.conf 29 | [+] clang 30 | [+] gcc 31 | [+] afl-fuzz 32 | [+] afl-clang 33 | [+] afl-clang++ 34 | [+] afl-collect 35 | [+] afl-multicore 36 | [+] afl-minimize 37 | [+] gdb 38 | [+] afl-cov 39 | [+] lcov 40 | [+] genhtml 41 | [+] afl-fuzz-fast 42 | [+] Checking if requirements are met. 43 | [+] All requirements met. Orthrus is ready for use! 44 | ``` 45 | 46 | ## Step 2: Create instrumented binaries 47 | 48 | Creating binaries is no different for A/B testing. Please read step 2 of [docs/Workflow.md](https://github.com/test-pipeline/orthrus/blob/dev/docs/Workflow.md#step-2-create-instrumented-binaries) if you haven't already. Please note that fuzzed binaries are identical for both the control (A) and experiment (B) groups.
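For reference, a typical invocation (taken from Step 2 of the routine workflow; the flags shown assume you want fuzzing, ASAN, and coverage binaries) looks like this:

```bash
# Build all instrumented variants once; both the control (A) and
# experiment (B) groups fuzz these same binaries.
cd $AUTOTOOLS_PROJECT_WD
orthrus create -fuzz -asan -cov
```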
49 | 50 | 51 | ## Step 3: Add/Remove fuzzing job 52 | 53 | Adding/removing a/b test jobs is identical to their routine counterparts, except for the `--jobtype=abtests --jobconf=PATH_TO_CONFIG` arguments that are appended to the `orthrus add` command. For instance, you can do 54 | ``` 55 | $ cat abtest.conf 56 | { 57 | 58 | "fuzzerA": "afl-fuzz", 59 | "fuzzerA_args": "", 60 | "fuzzerB": "afl-fuzz-fast", 61 | "fuzzerB_args": "", 62 | "num_jobs": 2 63 | } 64 | 65 | $ orthrus add --job="main @@" -s=./seeds --jobtype=abtests --jobconf=./abtest.conf 66 | [+] Adding fuzzing job to Orthrus workspace 67 | [+] Checking Orthrus workspace... done 68 | [+] Adding job... done 69 | [+] Configuring abtests job for ID [1178951622]... done 70 | [+] Adding initial samples for job ID [1178951622]... done 71 | [+] Configuring abtests job for ID [3911664828]... done 72 | [+] Adding initial samples for job ID [3911664828]... done 73 | $ orthrus remove -j 1271685425 74 | [+] Removing fuzzing job from Orthrus workspace 75 | [+] Retrieving job [1271685425]... done 76 | [+] Archiving data for abtests job [1271685425]... done 77 | ``` 78 | 79 | This sets up an A/B testing job in which identical fuzzing jobs will be created for both control (e.g. `afl-fuzz`) and experiment (e.g. `afl-fuzz-fast`) groups. **It is strongly recommended that you have at least 4 processor cores for a/b testing**. This ensures that each job has at least 1 master and 1 slave instance. This is particularly relevant for gauging the efficiency of deterministic fuzzing. See [1]. 80 | 81 | Note that there are three IDs involved. You can ID the configured a/b test via the top-level ID `1271685425`, the control group via ID `1178951622` and the experiment group via ID `3911664828`. For subsequent a/b test subcommands, you always pass the top-level (a/b test) ID e.g., `1271685425`. 82 | 83 | 84 | ## Step 4: Start/Stop afl fuzzers (via afl-utils) 85 | 86 | - To start fuzzing for a pre-defined a/b test job, you do 87 | ``` 88 | $ orthrus start -j 1271685425 89 | [+] Starting fuzzing jobs 90 | [+] Retrieving job ID [1271685425]... done 91 | [+] Tidying afl sync dir for abtests job ID [1178951622]... done 92 | [+] Checking core_pattern... done 93 | [+] Starting AFL ASAN fuzzer as master... done 94 | [*] Starting master instance... 95 | 96 | [+] Master 000 started (PID: 4725) 97 | [*] Starting slave instances... 98 | 99 | [+] Slave 001 started (PID: 4726) 100 | [+] Starting fuzzer for abtests job ID [1178951622]... done 101 | [+] Tidying afl sync dir for abtests job ID [3911664828]... done 102 | [+] Checking core_pattern... done 103 | [+] Starting AFL ASAN fuzzer as master... done 104 | [*] Starting master instance... 105 | 106 | [+] Master 000 started (PID: 4730) 107 | [*] Starting slave instances... 108 | 109 | [+] Slave 001 started (PID: 4731) 110 | [+] Starting fuzzer for abtests job ID [3911664828]... done 111 | ``` 112 | 113 | - To stop fuzzing, you do 114 | ``` 115 | $ orthrus stop -j 1271685425 116 | [+] Stopping fuzzing jobs 117 | [+] Retrieving job ID [1271685425]... done 118 | [+] Stopping abtests job for ID [1271685425]... done 119 | ``` 120 | 121 | - To resume an earlier session, do 122 | ``` 123 | $ orthrus start -j 1271685425 -m 124 | [+] Starting fuzzing jobs 125 | [+] Retrieving job ID [1271685425]... done 126 | [+] Tidying afl sync dir for abtests job ID [1178951622]... done 127 | [+] Minimizing corpus for job [1178951622]...
128 | [*] Looking for fuzzing queues in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/1178951622/afl-out'. 129 | 130 | [*] Found 1 fuzzers, collecting samples. 131 | 132 | [*] Successfully indexed 5 samples. 133 | 134 | [*] Copying 5 samples into collection directory... 135 | 136 | [*] Executing: afl-cmin -i /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/1178951622/collect -o /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/1178951622/collect.cmin -- /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/binaries/afl-asan/bin/main @@ 137 | 138 | [*] Testing the target binary... 139 | 140 | [*] Obtaining traces for input files in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/1178951622/collect'... 141 | 142 | [*] Sorting trace sets (this may take a while)... 143 | 144 | [*] Finding best candidates for each tuple... 145 | 146 | [*] Sorting candidate list (be patient)... 147 | 148 | [*] Processing candidates and writing output files... 149 | 150 | [!] WARNING: All test cases had the same traces, check syntax! 151 | 152 | [*] Performing dry-run in /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/1178951622/collect.cmin... 153 | 154 | [!] Be patient! Depending on the corpus size this step can take hours... 155 | 156 | [!] Collection directory exists and is not empty! 157 | 158 | [!] Skipping collection step... 159 | 160 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/1178951622/afl-out/SESSION000/queue 161 | 162 | [+] Minimizing afl sync dir for abtests job ID [1178951622]... done 163 | [+] Checking core_pattern... done 164 | [+] Starting AFL ASAN fuzzer as master... done 165 | [*] Starting master instance... 166 | 167 | [+] Master 000 started (PID: 8251) 168 | [*] Starting slave instances... 169 | 170 | [+] Slave 001 started (PID: 8253) 171 | [+] Starting fuzzer for abtests job ID [1178951622]... done 172 | [+] Tidying afl sync dir for abtests job ID [3911664828]... done 173 | [+] Minimizing corpus for job [3911664828]... 174 | [*] Looking for fuzzing queues in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/3911664828/afl-out'. 175 | 176 | [*] Found 1 fuzzers, collecting samples. 177 | 178 | [*] Successfully indexed 4 samples. 179 | 180 | [*] Copying 4 samples into collection directory... 181 | 182 | [*] Executing: afl-cmin -i /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/3911664828/collect -o /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/3911664828/collect.cmin -- /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/binaries/afl-asan/bin/main @@ 183 | 184 | [*] Testing the target binary... 185 | 186 | [*] Obtaining traces for input files in '/home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/3911664828/collect'... 187 | 188 | [*] Sorting trace sets (this may take a while)... 189 | 190 | [*] Finding best candidates for each tuple... 
191 | 192 | [*] Sorting candidate list (be patient)... 193 | 194 | [*] Processing candidates and writing output files... 195 | 196 | [!] WARNING: All test cases had the same traces, check syntax! 197 | 198 | [*] Performing dry-run in /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/3911664828/collect.cmin... 199 | 200 | [!] Be patient! Depending on the corpus size this step can take hours... 201 | 202 | [!] Collection directory exists and is not empty! 203 | 204 | [!] Skipping collection step... 205 | 206 | [*] Reseeding collect.cmin into queue /home/bhargava/work/gitlab/orthrus/testdata/Automake-Autoconf-Template-Project/.orthrus/jobs/abtests/1271685425/3911664828/afl-out/SESSION000/queue 207 | 208 | [+] Minimizing afl sync dir for abtests job ID [3911664828]... done 209 | [+] Checking core_pattern... done 210 | [+] Starting AFL ASAN fuzzer as master... done 211 | [*] Starting master instance... 212 | 213 | [+] Master 000 started (PID: 9597) 214 | [*] Starting slave instances... 215 | 216 | [+] Slave 001 started (PID: 9598) 217 | [+] Starting fuzzer for abtests job ID [3911664828]... done 218 | ``` 219 | 220 | 221 | - The `-m` flag minimizes the existing AFL corpus, archives the existing queue 222 | dir, reseeds it with the minimized seeds, and resumes fuzzing 223 | 224 | ## Step 5: Monitor test coverage (via afl-cov) 225 | 226 | Monitoring test coverage for a/b tests is WIP 227 | 228 | ## Step 6: Triage crashes (via afl-utils/exploitable) 229 | 230 | You can triage all crashes for an a/b test job, like so: 231 | 232 | ``` 233 | $ orthrus triage -j 1271685425 234 | [+] Triaging abtests job ID [1271685425]... done 235 | [+] Tidying crash dir... done 236 | [+] Triaging harden mode crashes for abtests job ID [1178951622]... done 237 | [+] Triaging abtests job ID [1271685425]... done 238 | [+] Tidying crash dir... done 239 | [+] Triaging asan mode crashes for abtests job ID [1178951622]... done 240 | [+] Triaging abtests job ID [1271685425]... done 241 | [+] Tidying crash dir... done 242 | [+] Triaging all mode crashes for abtests job ID [1178951622]... done 243 | [+] Triaged 1 crashes. See .orthrus/jobs/abtests/1271685425/1178951622/unique/ 244 | [+] Triaging crashes in control group... done 245 | [+] Triaging abtests job ID [1271685425]... done 246 | [+] Tidying crash dir... done 247 | [+] Triaging harden mode crashes for abtests job ID [3911664828]... done 248 | [+] Triaging abtests job ID [1271685425]... done 249 | [+] Tidying crash dir... done 250 | [+] Triaging asan mode crashes for abtests job ID [3911664828]... done 251 | [+] Triaging abtests job ID [1271685425]... done 252 | [+] Tidying crash dir... done 253 | [+] Triaging all mode crashes for abtests job ID [3911664828]... done 254 | [+] Triaged 1 crashes. See .orthrus/jobs/abtests/1271685425/3911664828/unique/ 255 | [+] Triaging crashes in experiment group... 
done 256 | ``` 257 | 258 | and view triaged crashes for each group, like so 259 | 260 | ``` 261 | $ orthrus show -j 1271685425 262 | A/B test status 263 | Control group 264 | Fuzzers alive : 2 265 | Total run time : 0 days, 0 hours 266 | Total execs : 0 million 267 | Cumulative speed : 0 execs/sec 268 | Pending paths : 2 faves, 2 total 269 | Pending per fuzzer : 1 faves, 1 total (on average) 270 | Crashes found : 0 locally unique 271 | 272 | Triaged crashes : 0 273 | Experiment group 274 | Fuzzers alive : 2 275 | Total run time : 0 days, 0 hours 276 | Total execs : 0 million 277 | Cumulative speed : 0 execs/sec 278 | Pending paths : 2 faves, 2 total 279 | Pending per fuzzer : 1 faves, 1 total (on average) 280 | Crashes found : 0 locally unique 281 | 282 | Triaged crashes : 0 283 | ``` 284 | 285 | ## Step 7: Spectrum analysis 286 | 287 | Spectrum analysis is WIP 288 | 289 | ## Step 8: User interface for fuzz status and coverage 290 | 291 | - You may view configured jobs, like so 292 | ``` 293 | $ orthrus show -conf 294 | Configured a/b tests: 295 | 0) [1271685425] main @@ 296 | Control group 297 | Fuzzer A: afl-fuzz Fuzzer A args: 298 | Experiment group 299 | Fuzzer B: afl-fuzz-fast Fuzzer B args: 300 | ``` 301 | 302 | - You may view the current status of afl-fuzz instances (via afl-whatsup) 303 | ``` 304 | $ orthrus show -j 1271685425 305 | A/B test status 306 | Control group 307 | Fuzzers alive : 2 308 | Total run time : 0 days, 0 hours 309 | Total execs : 0 million 310 | Cumulative speed : 0 execs/sec 311 | Pending paths : 2 faves, 2 total 312 | Pending per fuzzer : 1 faves, 1 total (on average) 313 | Crashes found : 0 locally unique 314 | 315 | Triaged crashes : 0 316 | Experiment group 317 | Fuzzers alive : 2 318 | Total run time : 0 days, 0 hours 319 | Total execs : 0 million 320 | Cumulative speed : 0 execs/sec 321 | Pending paths : 2 faves, 2 total 322 | Pending per fuzzer : 1 faves, 1 total (on average) 323 | Crashes found : 0 locally unique 324 | 325 | Triaged crashes : 0 326 | ``` 327 | 328 | - Coverage measurement for A/B tests is WIP. 329 | 330 | ## Step 9: Destroy orthrus session 331 | 332 | See [Step 8 of Workflow](https://github.com/test-pipeline/orthrus/blob/dev/docs/Workflow.md#step-8-destroy-orthrus-session) 333 | 334 | [1]: https://groups.google.com/d/msg/afl-users/fOPeb62FZUg/LYxgPYheDwAJ 335 | -------------------------------------------------------------------------------- /fuzzer/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/test-pipeline/orthrus/7e916f36ceffcc4fdd9013a4d952649f69738aa8/fuzzer/__init__.py -------------------------------------------------------------------------------- /gdb-orthrus/gdb_orthrus.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Gdb-Orthrus 3 | ''' 4 | 5 | try: 6 | import gdb 7 | except ImportError as e: 8 | raise ImportError("This script must be run in GDB: ", str(e)) 9 | 10 | import json 11 | import re 12 | 13 | class GdbOrthrus(gdb.Function): 14 | """JSONify core dump via GDB python plugin. Takes jsonfile as arg""" 15 | 16 | 17 | _re_gdb_bt = re.compile(r""" 18 | ^\#(?P[0-9]+)\s* 19 | ((?P
0x[A-Fa-f0-9]+)\s*)? 20 | (in\s(?P[A-Za-z0-9_:\-\?<>,]+)\s*)? 21 | (?P\([A-Za-z0-9_\'\"\\\=\-:\&\,\s\*]*\)\s*)? 22 | (at\s*(?P[A-Za-z0-9_\.\-]*):(?P[0-9]+)\s*)? 23 | ((\s)?\((?P.+?)\+(?P0x[A-Fa-f0-9]+)\))? 24 | """, re.MULTILINE | re.VERBOSE) 25 | _re_gdb_exploitable = re.compile(r".*Description: (?P[\w|\s]+).*" 26 | r"Short description: (?P[\w|\s\(\)\/]+).*" 27 | r"Hash: (?P[0-9A-Za-z\.]+).*" 28 | r"Exploitability Classification: (?P[A-Z_]+).*" 29 | r"Explanation: (?P[\w|\s|\.|\/,]+).*" 30 | r"Other tags: (?P[\w|\s,\(\)\/]+).*", 31 | re.DOTALL) 32 | 33 | def __init__(self): 34 | super(GdbOrthrus, self).__init__("jsonify") 35 | self.gdb_dict = {} 36 | 37 | def invoke(self, jsonfile): 38 | self.jsonfile = jsonfile.string() 39 | 40 | ## Get and parse backtrace 41 | bt_string = gdb.execute("bt", False, True) 42 | bt_dict = {} 43 | for match in self._re_gdb_bt.finditer(bt_string): 44 | frame_no, address, func, paramlist, filename, line, module, offset = \ 45 | match.group("frame_no", "address", "func", "paramlist", "file", "line", "module", "offset") 46 | frame_str = "frame{}".format(frame_no) 47 | 48 | bt_dict[frame_str] = {"frame_no": frame_no, "address": address, "function": func, "func_params": paramlist } 49 | 50 | if filename and line: 51 | bt_dict[frame_str]['file'] = filename 52 | bt_dict[frame_str]['line'] = line 53 | if module and offset: 54 | bt_dict[frame_str]['module'] = module 55 | bt_dict[frame_str]['offset'] = offset 56 | self.gdb_dict['backtrace'] = bt_dict 57 | self.gdb_dict['debug'] = bt_string 58 | 59 | 60 | # Parse fault address and exploitable output 61 | self.gdb_dict['fault_addr'] = gdb.execute('printf "%#lx", $_siginfo._sifields._sigfault.si_addr', False, True) 62 | exp_string = gdb.execute('exploitable', False, True) 63 | match = self._re_gdb_exploitable.match(exp_string) 64 | if match is not None: 65 | exp_dict = {} 66 | exp_dict['description'] = match.group("desc").rstrip() 67 | exp_dict['short_desc'] = match.group("shortdesc").rstrip() 68 | exp_dict['hash'] = match.group("hash").rstrip() 69 | exp_dict['classification'] = match.group("class").rstrip() 70 | exp_dict['explanation'] = match.group("explain").rstrip() 71 | exp_dict['tags'] = match.group("other").rstrip() 72 | self.gdb_dict['exploitable_info'] = exp_dict 73 | 74 | with open(self.jsonfile, 'w') as fp: 75 | json.dump(self.gdb_dict, fp, indent=4) 76 | return True 77 | 78 | GdbOrthrus() -------------------------------------------------------------------------------- /install_deps.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | echo -e "\t[+] Fetching afl-latest" 3 | wget -q http://lcamtuf.coredump.cx/afl/releases/afl-latest.tgz &> /dev/null 4 | tar xzf afl-latest.tgz &> /dev/null 5 | rm -f afl-latest.tgz && cd afl-* 6 | echo -e "\t[+] Installing afl" 7 | sudo make install 8 | cd .. 9 | echo -e "\t[+] Install aflfast" 10 | git clone https://github.com/mboehme/aflfast.git 11 | cd aflfast 12 | make && sudo mv afl-fuzz /usr/local/bin/afl-fuzz-fast 13 | cd .. 
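# Note: afl-fuzz refuses to run if core dumps are piped to an external
# utility (e.g. apport), so the next step points core_pattern at plain files.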
14 | echo -e "\t[+] Setting core_pattern" 15 | echo core | sudo tee /proc/sys/kernel/core_pattern 16 | echo -e "\t[+] Running autotools in test dir" 17 | cd testdata/Automake-Autoconf-Template-Project 18 | libtoolize --force 19 | aclocal && automake --force-missing --add-missing && autoconf 20 | cd ../../ 21 | echo -e "\t[+] Installing afl-utils" 22 | wget -q https://github.com/rc0r/afl-utils/archive/v1.32a.tar.gz && tar xzf v1.32a.tar.gz 23 | rm v1.32a.tar.gz && cd afl-utils-1.32a 24 | sudo mkdir -p /usr/lib/python3.4/site-packages && sudo python3 setup.py install 25 | cd ../ 26 | echo -e "\t[+] Setting up GDB and exploitable" 27 | cat << EOF >> ~/.gdbinit 28 | source /usr/lib/python3.4/site-packages/exploitable-1.32_rcor-py3.4.egg/exploitable/exploitable.py 29 | source ~/.orthrus/gdb_orthrus.py 30 | define hook-quit 31 | set confirm off 32 | end 33 | set pagination off 34 | EOF 35 | echo -e "\t[+] Installing afl-cov" 36 | wget -q https://github.com/mrash/afl-cov/archive/0.6.tar.gz && tar xzf 0.6.tar.gz 37 | rm 0.6.tar.gz && cd afl-cov-0.6 38 | sudo cp afl-cov /usr/local/bin/ 39 | cd .. 40 | echo -e "\t[+] Installing pysancov" 41 | wget -q https://raw.githubusercontent.com/llvm-mirror/compiler-rt/release_38/lib/sanitizer_common/scripts/sancov.py &> /dev/null 42 | chmod +x sancov.py &> /dev/null 43 | sudo mv sancov.py /usr/local/bin/pysancov &> /dev/null 44 | echo -e "\t[+] Copy gdb-orthrus.py to orthrus-local" 45 | mkdir -p $HOME/.orthrus 46 | wget https://raw.githubusercontent.com/test-pipeline/orthrus/master/gdb-orthrus/gdb_orthrus.py -P $HOME/.orthrus 47 | CLANG_SDICT_DB="https://www.dropbox.com/s/lqayfheheo3coag/fork-6173707-6216-gb24cc33-153448-2017-04-10.tar.gz?dl=0" 48 | curl -o clang.tar.gz -L "$CLANG_SDICT_DB" && tar xzf clang.tar.gz -C $HOME && chmod +x $HOME/local/bin/clang-sdict 49 | echo -e "\t[+] Install bear v2.1.5" 50 | wget https://launchpadlibrarian.net/240291131/bear_2.1.5.orig.tar.gz && tar xzf bear_2.1.5.orig.tar.gz && rm bear_2.1.5.orig.tar.gz 51 | mkdir Bear-2.1.5.build && cd Bear-2.1.5.build && cmake ../Bear-2.1.5 && make -j all && sudo make install && cd .. && rm -rf Bear-2.1.5 Bear-2.1.5.build 52 | -------------------------------------------------------------------------------- /job/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/test-pipeline/orthrus/7e916f36ceffcc4fdd9013a4d952649f69738aa8/job/__init__.py -------------------------------------------------------------------------------- /job/job.py: -------------------------------------------------------------------------------- 1 | import binascii 2 | import json 3 | import os 4 | import string 5 | 6 | ''' 7 | What is a job? 8 | It could be a routine (fuzzing) job or an a/b test job 9 | What characteristics of a job would we want to define here? 10 | It can be added, removed, started, stopped, coveraged, triaged, showed 11 | All orthrus subcommands except create and destroy are invoked on specific jobs 12 | 13 | I guess a good starting point to define a job would be as a data-type rather than as a functional entity. 14 | As a data-type, a job has an ID, type (routine/abtest), rootdir, 15 | 16 | How do we pass jobs around?
One way is to make a getter for 17 | 18 | ''' 19 | 20 | JOBCONF = '/jobs/jobs.conf' 21 | ROUTINEDIR = '/jobs/routine' 22 | ABTESTSDIR = '/jobs/abtests' 23 | 24 | JOBCONF_DICT = {'routine': [], 'abtests': []} 25 | 26 | 27 | def bootstrap(jobsconf): 28 | with open(jobsconf, 'wb') as jobconf_fp: 29 | json.dump(JOBCONF_DICT, jobconf_fp, indent=4) 30 | 31 | 32 | def does_id_exist(jobsconf, id): 33 | with open(jobsconf, 'r') as jobconf_fp: 34 | jobsconf_dict = json.load(jobconf_fp) 35 | 36 | # Check routine list 37 | routine_jobs = jobsconf_dict['routine'] 38 | val = [item for item in routine_jobs if item['id'] == id] 39 | if val: 40 | return val[0] 41 | 42 | # Check abtests list 43 | abtests_jobs = jobsconf_dict['abtests'] 44 | val = [item for item in abtests_jobs if item['id'] == id] 45 | if val: 46 | return val[0] 47 | 48 | return None 49 | 50 | def remove_id_from_conf(jobsconf, id, type): 51 | with open(jobsconf, 'r') as jobconf_fp: 52 | jobsconf_dict = json.load(jobconf_fp) 53 | 54 | # Find and remove in the typed list 55 | if type == 'routine': 56 | [jobsconf_dict['routine'].remove(item) for item in jobsconf_dict['routine'] if item['id'] == id] 57 | else: 58 | [jobsconf_dict['abtests'].remove(item) for item in jobsconf_dict['abtests'] if item['id'] == id] 59 | 60 | # Update jobs.conf 61 | with open(jobsconf, 'w') as jobconf_fp: 62 | json.dump(jobsconf_dict, jobconf_fp, indent=4) 63 | 64 | class job(object): 65 | 66 | def __init__(self, fuzz_cmd, jobtype, orthrusdir, jobconf=None): 67 | 68 | self.jobconf = jobconf 69 | self.jobtype = jobtype 70 | self.orthrusdir = orthrusdir 71 | self.jobsconf = self.orthrusdir + JOBCONF 72 | self.fuzz_cmd = fuzz_cmd 73 | self.jobids = [] 74 | self.fuzzers = [] 75 | self.fuzzer_args = [] 76 | 77 | ## Bootstap jobs.conf if necessary 78 | if not os.path.exists(self.jobsconf): 79 | bootstrap(self.jobsconf) 80 | 81 | def parse_and_validate_routine_jobconf(self): 82 | with open(self.jobconf, 'rb') as jc_fp: 83 | self.data = json.load(jc_fp) 84 | 85 | self.fuzzers.append(self.data['fuzzer']) 86 | self.fuzzer_args.append(self.data['fuzzer_args']) 87 | return True 88 | 89 | def parse_and_validate_abtests_jobconf(self): 90 | with open(self.jobconf, 'rb') as abconf_fp: 91 | self.abconf_data = json.load(abconf_fp) 92 | 93 | # Multi-variate tests must have even number of jobs 94 | if self.abconf_data['num_jobs'] % 2: 95 | return False 96 | 97 | self.num_jobs = self.abconf_data['num_jobs'] 98 | 99 | for i in range(0, self.num_jobs): 100 | if not self.abconf_data['fuzzer{}'.format(string.ascii_uppercase[i])]: 101 | return False 102 | 103 | return True 104 | 105 | def update_jobs_conf(self): 106 | 107 | with open(self.jobsconf, 'r') as jobconf_fp: 108 | jobsconf_dict = json.load(jobconf_fp) 109 | 110 | if self.jobtype == 'routine': 111 | routine_dict = {'id': self.id, 'target': self.target, 'params': self.params, 'type': self.jobtype, 112 | 'fuzzer': self.fuzzers[0], 'fuzzer_args': self.fuzzer_args[0]} 113 | jobsconf_dict['routine'].append(routine_dict) 114 | elif self.jobtype == 'abtests': 115 | abtests_dict = {'id': self.id, 'target': self.target, 'params': self.params, 116 | 'jobids': self.jobids, 'fuzzers': self.fuzzers, 117 | 'fuzzer_args': self.fuzzer_args, 'type': self.jobtype, 'num_jobs': self.num_jobs} 118 | jobsconf_dict['abtests'].append(abtests_dict) 119 | 120 | # Overwrites JSON file 121 | with open(self.jobsconf, 'w') as jobconf_fp: 122 | json.dump(jobsconf_dict, jobconf_fp, indent=4) 123 | 124 | def create_dirs(self): 125 | 126 | # Routine job and no 
routine dir 127 | if self.jobtype == 'routine' and not os.path.exists(self.orthrusdir + ROUTINEDIR): 128 | os.makedirs(self.orthrusdir + ROUTINEDIR) 129 | # Abtests job and no abtests dir 130 | elif self.jobtype == 'abtests' and not os.path.exists(self.orthrusdir + ABTESTSDIR): 131 | os.makedirs(self.orthrusdir + ABTESTSDIR) 132 | 133 | if self.jobtype == 'routine': 134 | os.makedirs(self.orthrusdir + ROUTINEDIR + '/{}'.format(self.id)) 135 | elif self.jobtype == 'abtests': 136 | os.makedirs(self.orthrusdir + ABTESTSDIR + '/{}'.format(self.id)) 137 | for i in range(0, self.num_jobs): 138 | os.makedirs(self.orthrusdir + ABTESTSDIR + '/{}'.format(self.id) + '/{}'.format(self.jobids[i])) 139 | 140 | def materialize(self): 141 | 142 | if not (self.jobtype == 'routine' or self.jobtype == 'abtests'): 143 | raise ValueError 144 | 145 | if self.jobtype == 'abtests' and not self.jobconf: 146 | raise ValueError 147 | 148 | if self.jobtype == 'abtests': 149 | if not self.parse_and_validate_abtests_jobconf(): 150 | raise ValueError 151 | else: 152 | self.parse_and_validate_routine_jobconf() 153 | 154 | ## Break down fuzz_cmd 155 | self.target = self.fuzz_cmd.split(" ")[0] 156 | self.params = " ".join(self.fuzz_cmd.split(" ")[1:]) 157 | 158 | if self.jobtype == 'routine': 159 | crcstring = self.fuzz_cmd 160 | self.id = str(binascii.crc32(crcstring) & 0xffffffff) 161 | self.rootdir = self.orthrusdir + ROUTINEDIR + '/{}'.format(self.id) 162 | else: 163 | crcstring = self.fuzz_cmd 164 | for i in range(0, self.num_jobs): 165 | fuzzername = 'fuzzer{}'.format(string.ascii_uppercase[i]) 166 | fuzzerargs = fuzzername + '_args' 167 | crcstring += self.abconf_data[fuzzername] + self.abconf_data[fuzzerargs] 168 | self.jobids.append(str(binascii.crc32(self.fuzz_cmd+str(i)) & 0xffffffff)) 169 | self.fuzzers.append(self.abconf_data[fuzzername]) 170 | self.fuzzer_args.append(self.abconf_data[fuzzerargs]) 171 | self.id = str(binascii.crc32(crcstring) & 0xffffffff) 172 | self.rootdir = self.orthrusdir + ABTESTSDIR + '/{}'.format(self.id) 173 | 174 | # Check if ID exists in jobs.conf 175 | if does_id_exist(self.jobsconf, self.id): 176 | return False 177 | 178 | self.update_jobs_conf() 179 | self.create_dirs() 180 | return True 181 | 182 | 183 | class jobtoken(object): 184 | 185 | def __init__(self, orthrusdir, jobid): 186 | self.jobsconf = orthrusdir + JOBCONF 187 | self.id = jobid 188 | self.orthrusdir = orthrusdir 189 | 190 | def materialize(self): 191 | ## Bootstap jobs.conf if necessary 192 | if not os.path.exists(self.jobsconf): 193 | raise ValueError 194 | 195 | ## Check if jobid exists 196 | self._jobdesc = does_id_exist(self.jobsconf, self.id) 197 | if not self._jobdesc: 198 | raise ValueError 199 | assert self.id == self._jobdesc['id'], 'Job token ID assertion failed!' 
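# Illustrative usage sketch (not part of this module's control flow): given an
# existing orthrus workspace and a job ID already recorded in jobs.conf, a
# jobtoken rehydrates the job's metadata. The path and ID below are
# hypothetical placeholders; materialize() raises ValueError for unknown IDs.
#
#     from job.job import jobtoken
#     token = jobtoken('/home/user/.orthrus', '1271685425')
#     if token.materialize():
#         print token.rootdir, token.type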
200 |
201 |         self.params = self._jobdesc['params']
202 |         self.target = self._jobdesc['target']
203 |         self.type = self._jobdesc['type']
204 |         if self.type == 'abtests':
205 |             self.rootdir = self.orthrusdir + ABTESTSDIR + '/{}'.format(self.id)
206 |             self.jobids = self._jobdesc['jobids']
207 |             self.fuzzers = self._jobdesc['fuzzers']
208 |             self.fuzzer_args = self._jobdesc['fuzzer_args']
209 |             self.num_jobs = self._jobdesc['num_jobs']
210 |         else:
211 |             self.rootdir = self.orthrusdir + ROUTINEDIR + '/{}'.format(self.id)
212 |             self.fuzzers = self._jobdesc['fuzzer']
213 |             self.fuzzer_args = self._jobdesc['fuzzer_args']
214 |         return True
--------------------------------------------------------------------------------
/orthrus/__init__.py:
--------------------------------------------------------------------------------
1 | __author_name__ = 'Bhargava Shastry, and Markus Leutner'
2 | __author_email__ = 'https://github.com/test-pipeline/orthrus'
3 | __version__ = "1.2"
--------------------------------------------------------------------------------
/orthrusutils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/test-pipeline/orthrus/7e916f36ceffcc4fdd9013a4d952649f69738aa8/orthrusutils/__init__.py
--------------------------------------------------------------------------------
/orthrusutils/orthrusutils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import subprocess
3 | import os
4 | import shutil
5 | import ConfigParser
6 | from argparse import ArgumentParser
7 | import errno
8 | import glob
9 | import logging
10 |
11 | CREATE_HELP = """Create an orthrus workspace"""
12 | ADD_HELP = """Add a fuzzing job"""
13 | REMOVE_HELP = """Remove a fuzzing job"""
14 | START_HELP = """Start a fuzzing job"""
15 | STOP_HELP = """Stop a fuzzing job"""
16 | SHOW_HELP = """Show what's currently going on"""
17 | TRIAGE_HELP = """Triage crash corpus"""
18 | COVERAGE_HELP = """Run afl-cov on existing AFL corpus"""
19 | SPECTRUM_HELP = """Run spectrum-based analysis on existing AFL corpus"""
20 | RUNTIME_HELP = """Perform dynamic analysis of existing AFL corpus"""
21 | DESTROY_HELP = """Destroy an orthrus workspace"""
22 | VALIDATE_HELP = """Check if all Orthrus dependencies are met"""
23 | ## A/B Tests
24 | TEST_ADD_HELP = """Add a test job of given type and config"""
25 |
26 | TEST_SLEEP = 5
27 |
28 | class bcolors:
29 |     HEADER = '\033[95m'
30 |     OKBLUE = '\033[94m'
31 |     OKGREEN = '\033[92m'
32 |     WARNING = '\033[93m'
33 |     ERROR = '\033[91m'
34 |     INFO = '\033[93m'
35 |     FAIL = '\033[91m'
36 |     ENDC = '\033[0m'
37 |     BOLD = '\033[1m'
38 |     UNDERLINE = '\033[4m'
39 |
40 | def color_print(color, msg):
41 |     sys.stdout.write(color + msg + bcolors.ENDC + "\n")
42 |     sys.stdout.flush()
43 |
44 | def color_print_singleline(color, msg):
45 |     sys.stdout.write(color + msg + bcolors.ENDC)
46 |     sys.stdout.flush()
47 |
48 | def run_cmd(command, env=None, logfile=None):
49 |     if not logfile:
50 |         logfile = os.devnull
51 |
52 |     if not env:
53 |         env = os.environ.copy()
54 |
55 |     logfh = open(logfile, 'w')
56 |     proc = subprocess.Popen(command, shell=True, executable='/bin/bash',
57 |                             env=env, stdout=logfh, stderr=subprocess.STDOUT)
58 |     ret = proc.wait()
59 |     logfh.close()
60 |
61 |     if ret != 0:
62 |         return False
63 |     return True
64 |
65 |
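# Usage sketch for run_cmd (illustrative; the command and log path are
# hypothetical). run_cmd returns True iff the command exits with status 0,
# with combined stdout/stderr captured in logfile (default: /dev/null):
#
#     if not run_cmd('make -j2', logfile='/tmp/orthrus-build.log'):
#         color_print(bcolors.FAIL, 'build failed, see /tmp/orthrus-build.log')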
66 | def return_elf_binaries(inpath=None):
67 |
68 |     if not inpath:
69 |         # Search everywhere in working dir except in .orthrus
70 |         command = "find -type f -executable -not -path \"./.orthrus/*\"" \
71 |                   " -exec file -i '{}' \; | grep 'x-executable; charset=binary' | cut -d':' -f1"
72 |     else:
73 |         # Search everywhere in specified path
74 |         command = "find {} -type f ".format(inpath) + "-executable -exec file -i '{}' \; | " \
75 |                                                       "grep 'x-executable; charset=binary' | " \
76 |                                                       "cut -d':' -f1"
77 |     output = subprocess.check_output(command, shell=True)
78 |     return filter(None, output.split("\n"))
79 |
80 | def copy_binaries(dest):
81 |     # Create bin dir if it doesn't exist
82 |     mkdir_p(dest)
83 |     # if not os.path.isdir(dest):
84 |     #     os.makedirs(dest)
85 |
86 |     binaries = return_elf_binaries()
87 |     # Overwriting existing binaries is fine
88 |     for f in binaries:
89 |         if not os.path.isfile(dest + os.path.basename(f)):
90 |             shutil.copy(f, dest)
91 |
92 |
93 | def parse_cmdline(description, args, createfunc=None, addfunc=None, removefunc=None,
94 |                   startfunc=None, stopfunc=None, showfunc=None, triagefunc=None,
95 |                   coveragefunc=None, spectrumfunc=None, runtimefunc=None, destroyfunc=None, validatefunc=None):
96 |     argParser = ArgumentParser(description)
97 |
98 |     argParser.add_argument('-v', '--verbose',
99 |                            action='store_true',
100 |                            help="""Verbose mode, print information about the progress""",
101 |                            default=False)
102 |
103 |     subparsers = argParser.add_subparsers(description="Orthrus subcommands")
104 |
105 |     # Command 'create'
106 |     create_parser = subparsers.add_parser('create', help=CREATE_HELP)
107 |     create_parser.add_argument('-asan', '--afl-asan',
108 |                                action='store_true',
109 |                                help="""Setup binaries for afl with AddressSanitizer""",
110 |                                default=False)
111 |     create_parser.add_argument('-asanblacklist', '--afl-asan-blacklist', action='store_true',
112 |                                help="""Setup binaries for afl with AddressSanitizer with issue suppression.""",
113 |                                default=False)
114 |     create_parser.add_argument('-fuzz', '--afl-harden',
115 |                                action='store_true',
116 |                                help="""Setup binaries for afl in 'harden' mode (stack-protector, fortify)""",
117 |                                default=False)
118 |     create_parser.add_argument('-cov', '--coverage',
119 |                                action='store_true',
120 |                                help="""Setup binaries to collect coverage information""",
121 |                                default=False)
122 |     create_parser.add_argument('-sancov', '--san-coverage',
123 |                                action='store_true',
124 |                                help="""Setup binaries to collect sanitizer coverage information""",
125 |                                default=False)
126 |     create_parser.add_argument('-dict', '--dictionary',
127 |                                action='store_true',
128 |                                help="""Generate input dictionary for fuzzing""",
129 |                                default=False)
130 |     create_parser.add_argument('-d', '--configure-flags', nargs='?',
131 |                                type=str, default="",
132 |                                help='Additional flags for configuring the source')
133 |     create_parser.set_defaults(func=createfunc)
134 |
135 |     # Command 'add'
136 |     add_parser = subparsers.add_parser('add', help=ADD_HELP)
137 |     add_parser.add_argument('-n', '--job', required=True, type=str,
138 |                             help='Add a job with executable command line invocation string')
139 |     add_parser.add_argument('-i', '--import', dest='_import', nargs='?',
140 |                             type=str, default="",
141 |                             help='Import an AFL fuzzing output directory provided as tar.gz')
142 |     add_parser.add_argument('-s', '--sample', nargs='?',
143 |                             type=str, default="",
144 |                             help='A single file or directory of afl testcases for fuzzing')
145 |     add_parser.add_argument('-type', '--jobtype', nargs='?', type=str,
146 |                             default="", help=TEST_ADD_HELP)
147 |     add_parser.add_argument('-conf', '--jobconf', nargs='?', type=str,
148 |                             default="", help=TEST_ADD_HELP)
149 |     add_parser.set_defaults(func=addfunc)
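# Illustrative invocation of parse_cmdline (argv values are hypothetical):
# adding an a/b test job from a job config file, the same way orthrus'
# entry point feeds sys.argv[1:]. The returned namespace carries the
# subcommand handler bound via set_defaults, e.g. args.func == addfunc:
#
#     args = parse_cmdline('Orthrus', ['add', '--job', 'main @@',
#                                      '-type', 'abtests', '-conf', 'abtest.conf'])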
150 |
151 |     # Command 'remove'
152 |     remove_parser = subparsers.add_parser('remove', help=REMOVE_HELP)
153 |     remove_parser.add_argument('-j', '--job-id', required=True,
154 |                                type=str, help='Job Id for the job which should be removed')
155 |     remove_parser.set_defaults(func=removefunc)
156 |
157 |     # Command 'start'
158 |     start_parser = subparsers.add_parser('start', help=START_HELP)
159 |     start_parser.add_argument('-j', '--job-id', required=True,
160 |                               type=str, help='Job Id for the job which should be started')
161 |     start_parser.add_argument('-c', '--coverage',
162 |                               action='store_true',
163 |                               help="""Collect coverage information while fuzzing""",
164 |                               default=False)
165 |     start_parser.add_argument('-m', '--minimize',
166 |                               action='store_true',
167 |                               help="""Minimize corpus before start""",
168 |                               default=False)
169 |     start_parser.set_defaults(func=startfunc)
170 |
171 |     # Command 'stop'
172 |     stop_parser = subparsers.add_parser('stop', help=STOP_HELP)
173 |     stop_parser.add_argument('-c', '--coverage',
174 |                              action='store_true',
175 |                              help="""Stop afl-cov instances on stop""",
176 |                              default=False)
177 |     stop_parser.add_argument('-j', '--job-id', required=True,
178 |                              type=str, help='Job Id for the job which should be stopped')
179 |     stop_parser.set_defaults(func=stopfunc)
180 |
181 |     # Command 'show'
182 |     show_parser = subparsers.add_parser('show', help=SHOW_HELP)
183 |     show_parser.add_argument('-j', '--job-id', type=str, help='Job Id for the job which should be shown')
184 |     show_parser.add_argument('-conf', '--conf', action='store_true', help="""Show configured jobs""", default=False)
185 |     show_parser.add_argument('-cov', '--cov', action='store_true', help="""Show coverage of job""", default=False)
186 |     show_parser.set_defaults(func=showfunc)
187 |
188 |     # Command 'triage'
189 |     triage_parser = subparsers.add_parser('triage', help=TRIAGE_HELP)
190 |     triage_parser.add_argument('-j', '--job-id', required=True,
191 |                                type=str, default="",
192 |                                help="""Job Id for the job which should be triaged""")
193 |     triage_parser.set_defaults(func=triagefunc)
194 |
195 |     # Command 'coverage'
196 |     coverage_parser = subparsers.add_parser('coverage', help=COVERAGE_HELP)
197 |     coverage_parser.add_argument('-j', '--job-id', type=str, default="", required=True,
198 |                                  help="""Job Id for checking coverage""")
199 |     coverage_parser.set_defaults(func=coveragefunc)
200 |
201 |     # Command 'spectrum': Merges afl-sancov functionality
202 |     spectrum_parser = subparsers.add_parser('spectrum', help=SPECTRUM_HELP)
203 |     spectrum_parser.add_argument('-j', '--job-id', type=str, default="", required=True,
204 |                                  help="""Job Id for spectrum-based analysis""")
205 |
206 |     spectrum_parser.add_argument("-O", "--overwrite", action='store_true',
207 |                                  help="Overwrite existing coverage results", default=False)
208 |     spectrum_parser.add_argument("--disable-cmd-redirection", action='store_true',
209 |                                  help="Disable redirection of command results to /dev/null",
210 |                                  default=False)
211 |     spectrum_parser.add_argument("--coverage-include-lines", action='store_true',
212 |                                  help="Include lines in zero-coverage status files",
213 |                                  default=False)
214 |     spectrum_parser.add_argument("--preserve-all-sancov-files", action='store_true',
215 |                                  help="Keep all sancov files (not usually necessary)",
216 |                                  default=False)
217 |     spectrum_parser.add_argument("-v", "--verbose", action='store_true',
218 |                                  help="Verbose mode", default=False)
219 |     spectrum_parser.add_argument("-V", "--version",
action='store_true',
220 |                                  help="Print version and exit", default=False)
221 |     spectrum_parser.add_argument("-q", "--quiet", action='store_true',
222 |                                  help="Quiet mode", default=False)
223 |     spectrum_parser.add_argument("--sancov-path", type=str,
224 |                                  help="Path to sancov binary", default=which('sancov-3.8'))
225 |     spectrum_parser.add_argument("--pysancov-path", type=str,
226 |                                  help="Path to sancov.py script (in clang compiler-rt)",
227 |                                  default=which("pysancov"))
228 |     spectrum_parser.add_argument("--llvm-sym-path", type=str,
229 |                                  help="Path to llvm-symbolizer", default=which("llvm-symbolizer-3.8"))
230 |     spectrum_parser.add_argument("--dd-num", type=int,
231 |                                  help="Experimental! Perform more compute-intensive analysis of crashing input by comparing its "
232 |                                  "path profile with aggregated path profiles of N=dd-num randomly selected non-crashing inputs",
233 |                                  default=1)
234 |     spectrum_parser.add_argument("--sancov-bug", action='store_true',
235 |                                  help="Work around a sancov bug that occurs for certain coverage_dir env vars", default=False)
236 |     spectrum_parser.set_defaults(func=spectrumfunc)
237 |
238 |     # Command 'runtime'
239 |     runtime_parser = subparsers.add_parser('runtime', help=RUNTIME_HELP)
240 |     runtime_parser.add_argument('-j', '--job-id', type=str, default="", required=True,
241 |                                 help="""Job Id for dynamic analysis""")
242 |     runtime_parser.add_argument("-regen", "--regenerate", action='store_true',
243 |                                 help="Regenerate dynamic info, archiving old results", default=False)
244 |     runtime_parser.set_defaults(func=runtimefunc)
245 |
246 |     # Command 'destroy'
247 |     destroy_parser = subparsers.add_parser('destroy', help=DESTROY_HELP)
248 |     destroy_parser.set_defaults(func=destroyfunc)
249 |
250 |     # Command 'validate'
251 |     validate_parser = subparsers.add_parser('validate', help=VALIDATE_HELP)
252 |     validate_parser.set_defaults(func=validatefunc)
253 |
254 |     return argParser.parse_args(args)
255 |
256 | def parse_config(configfile=None):
257 |     config = {}
258 |     if not configfile:
259 |         configfile = os.path.expanduser('~/.orthrus/orthrus.conf')
260 |
261 |     configparser = ConfigParser.ConfigParser()
262 |     configparser.read(configfile)
263 |
264 |     config['orthrus'] = {}
265 |     config['orthrus']['directory'] = configparser.get("orthrus", "directory")
266 |
267 |     config['dependencies'] = configparser.items("dependencies")
268 |
269 |     return config
270 |
271 | def min_or_reseed_setup(orthrus_dir, target, params):
272 |     export = {}
273 |     export['PYTHONUNBUFFERED'] = "1"
274 |     env = os.environ.copy()
275 |     env.update(export)
276 |     isasan = False
277 |
278 |     if os.path.exists(orthrus_dir + "/binaries/afl-harden"):
279 |         launch = orthrus_dir + "/binaries/afl-harden/bin/" + target + " " + \
280 |                  params.replace("&", "\&")
281 |     else:
282 |         isasan = True
283 |         launch = orthrus_dir + "/binaries/afl-asan/bin/" + target + " " + \
284 |                  params
285 |
286 |     if isasan and is64bit():
287 |         mem_limit = 30000000
288 |     else:
289 |         mem_limit = 800
290 |     return (env, launch, mem_limit)
291 |
292 | def minimize_sync_dir(orthrus_dir, jobroot_dir, job_id, target, params):
293 |     color_print(bcolors.OKGREEN, "\t\t[+] Minimizing corpus for job [" + job_id + "]...")
294 |
295 |     env, launch, mem_limit = min_or_reseed_setup(orthrus_dir, target, params)
296 |
297 |     cmin = " ".join(
298 |         ["afl-minimize", "-c", jobroot_dir + "/collect", "--cmin",
299 |          "--cmin-mem-limit={}".format(mem_limit), "--cmin-timeout=5000", "--dry-run",
300 |          jobroot_dir + "/afl-out", "--", "'" + launch + "'"])
301 |     p = subprocess.Popen(cmin,
bufsize=0, shell=True, executable='/bin/bash', env=env, stdout=subprocess.PIPE) 302 | for line in p.stdout: 303 | if "[*]" in line or "[!]" in line: 304 | color_print(bcolors.OKGREEN, "\t\t\t" + line) 305 | return True 306 | 307 | def reseed_sync_dir(orthrus_dir, jobroot_dir, job_id, target, params): 308 | color_print(bcolors.OKGREEN, "\t\t[+] Reseeding job [" + job_id + "]...") 309 | 310 | env, launch, mem_limit = min_or_reseed_setup(orthrus_dir, target, params) 311 | 312 | reseed_cmd = " ".join( 313 | ["afl-minimize", "-c", jobroot_dir + "/collect.cmin", 314 | "--reseed", jobroot_dir + "/afl-out", "--", 315 | "'" + launch + "'"]) 316 | p = subprocess.Popen(reseed_cmd, bufsize=0, shell=True, executable='/bin/bash', env=env, stdout=subprocess.PIPE) 317 | for line in p.stdout: 318 | if "[*]" in line or "[!]" in line: 319 | color_print(bcolors.OKGREEN, "\t\t\t" + line) 320 | 321 | if os.path.exists(jobroot_dir + "/collect"): 322 | shutil.rmtree(jobroot_dir + "/collect") 323 | if os.path.exists(jobroot_dir + "/collect.cmin"): 324 | shutil.rmtree(jobroot_dir + "/collect.cmin") 325 | return True 326 | 327 | def minimize_and_reseed(orthrus_dir, jobroot_dir, job_id, target, params): 328 | minimize_sync_dir(orthrus_dir, jobroot_dir, job_id, target, params) 329 | reseed_sync_dir(orthrus_dir, jobroot_dir, job_id, target, params) 330 | return True 331 | 332 | def is64bit(): 333 | cmd = 'uname -m' 334 | try: 335 | if 'x86_64' in subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT): 336 | return True 337 | except subprocess.CalledProcessError as e: 338 | print e.output 339 | return False 340 | 341 | def getnproc(): 342 | cmd = 'nproc' 343 | try: 344 | nproc = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) 345 | except subprocess.CalledProcessError: 346 | return 1 347 | return nproc.rstrip() 348 | 349 | # def printfile(filename): 350 | # cmd = 'cat ' + filename 351 | # print subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) 352 | 353 | def which(progname): 354 | cmd = 'which ' + progname 355 | try: 356 | path = os.path.expanduser(subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT).rstrip()) 357 | except subprocess.CalledProcessError as e: 358 | print e.output 359 | return '' 360 | return os.path.abspath(path) 361 | 362 | def run_afl_cov(orthrus_dir, jobroot_dir, target_arg, params, livemode=False, test=False): 363 | target = orthrus_dir + "/binaries/coverage/gcc/bin/" + \ 364 | target_arg + " " + params.replace("@@", "AFL_FILE") 365 | 366 | if livemode: 367 | cmd = ["nohup", "afl-cov", "-d", jobroot_dir + "/afl-out", "--live", "--lcov-path", which('lcov'), 368 | "--genhtml-path", which('genhtml'), "--coverage-cmd", "'" + target + "'", "--code-dir", "."] 369 | if test: 370 | cmd.extend(['--sleep', str(TEST_SLEEP)]) 371 | else: 372 | cmd = ["nohup", "afl-cov", "-d", jobroot_dir + "/afl-out", "--lcov-path", which('lcov'), "--genhtml-path", 373 | which('genhtml'), "--coverage-cmd", "'" + target + "'", "--code-dir", "."] 374 | logfile = orthrus_dir + "/logs/afl-coverage.log" 375 | p = subprocess.Popen(" ".join(cmd), shell=True, executable="/bin/bash", stdout=open(logfile, 'w'), 376 | stderr=subprocess.STDOUT) 377 | return True 378 | 379 | def validate_inst(config): 380 | 381 | if not config['dependencies']: 382 | return False 383 | 384 | for program, mode in config['dependencies']: 385 | if mode == 'on' and not which(program): 386 | color_print(bcolors.FAIL, "\t\t\t[-] Could not locate {}. Perhaps modifying the PATH variable helps?". 
387 |                         format(program))
388 |             return False
389 |     return True
390 |
391 | def func_wrapper(function, *args):
392 |     try:
393 |         rv = function(*args)
394 |         if rv is None or rv:
395 |             return True
396 |         else:
397 |             return False
398 |     except:
399 |         logging.basicConfig(level=logging.ERROR, filename='.pyexp')
400 |         logging.exception('Traceback of an exception raised during Orthrus invocation follows')
401 |         return False
402 |
403 | def pprint_decorator_fargs(predicate, prologue, indent=0, fail_msg='failed', success_msg='done'):
404 |
405 |     color_print_singleline(bcolors.OKGREEN, '\t'*indent + '[+] {}... '.format(prologue))
406 |
407 |     if not predicate:
408 |         color_print(bcolors.FAIL, fail_msg)
409 |         return False
410 |     else:
411 |         color_print(bcolors.OKGREEN, success_msg)
412 |         return True
413 |
414 | def pprint_decorator(function, prologue, indent=0, fail_msg='failed', success_msg='done'):
415 |
416 |     color_print_singleline(bcolors.OKGREEN, '\t'*indent + '[+] {}... '.format(prologue))
417 |
418 |     try:
419 |         rv = function()
420 |         if rv is None or rv:
421 |             color_print(bcolors.OKGREEN, success_msg)
422 |             return True
423 |         else:
424 |             color_print(bcolors.FAIL, fail_msg)
425 |             return False
426 |     except:
427 |         logging.basicConfig(level=logging.ERROR, filename='.pyexp')
428 |         logging.exception('Traceback of an exception raised during Orthrus invocation follows')
429 |         color_print(bcolors.FAIL, fail_msg)
430 |         return False
431 |
432 | # HT: http://stackoverflow.com/a/600612/4712439
433 | def mkdir_p(path):
434 |     try:
435 |         os.makedirs(path)
436 |     except OSError as exc:  # Python >2.5
437 |         if exc.errno == errno.EEXIST and os.path.isdir(path):
438 |             pass
439 |         else:
440 |             raise
441 |
442 | def import_test_cases(qdir):
443 |     return sorted(glob.glob(qdir + "/id:*"))
444 |
445 | def import_unique_crashes(dir):
446 |     return sorted(glob.glob(dir + "/*id:*"))
447 |
448 | def get_asan_report(cmd, strbuf, env=None):
449 |
450 |     if not env:
451 |         env = os.environ.copy()
452 |
453 |
454 |     proc = subprocess.Popen(cmd, shell=True, executable='/bin/bash',
455 |                             env=env, stdout=open(os.devnull, 'w'), stderr=subprocess.PIPE)
456 |     strbuf.append(proc.communicate()[1])
457 |
458 |     return True
459 |
460 | def overrride_default_afl_asan_options(env):
461 |     env.update({'ASAN_OPTIONS': 'abort_on_error=1:detect_leaks=0:symbolize=0:handle_segv=0:allocator_may_return_null=1'})
462 |
463 | def triage_asan_options(env):
464 |     env.update({'ASAN_OPTIONS': 'abort_on_error=1:detect_leaks=0:symbolize=1:handle_segv=0:allocator_may_return_null=1:'
465 |                                 'disable_core=1'})
466 |
467 | def spectrum_asan_options(env, extra=None):
468 |     if extra:
469 |         env.update({'ASAN_OPTIONS': 'abort_on_error=1:detect_leaks=0:symbolize=1:handle_segv=0:allocator_may_return_null=1:'
470 |                                     'disable_core=1:{}'.format(extra)})
471 |     else:
472 |         triage_asan_options(env)
473 |
474 | def runtime_asan_options(env):
475 |     env.update({'ASAN_OPTIONS': 'detect_leaks=0:symbolize=1:allocator_may_return_null=1'})
--------------------------------------------------------------------------------
/runtime/GdbExtractor.py:
--------------------------------------------------------------------------------
1 | '''
2 | Extractor for GDB
3 | '''
4 | import subprocess
5 | import fcntl
6 | import os
7 | import time
8 | import re
9 | import json
10 |
11 | class GdbExtractor(object):
12 |     '''
13 |     Gdb Extractor interfaces Gdb to extract information,
14 |     such as arguments for a particular frame
15 |     '''
16 |     # _re_gdb_startup = re.compile( r"""
17 |     # .+
18 |     # \[New\sLWP\s(?P<pid>[0-9]+)\]
19 |     # .+
20 |     # Core\swas\sgenerated\sby\s\`
21 |     # (?P<cmd_line>[A-Za-z0-9./\s\$=_:,+-]+)\'\.
22 |     # """, re.VERBOSE | re.DOTALL)
23 |
24 |     # _re_gdb_var_info = re.compile( r"""
25 |     # ^(?P<var_name>(\$)?[A-Za-z0-9_?]+)
26 |     # \s=\s
27 |     # (\(.+?\)\s)?
28 |     # (?P<value>((@)?0x[A-Fa-f0-9]+)?([0-9\-]+)?(<[a-z\s]+>)?([A-Za-z0-9_?]+)?)(:)?
29 |     # (\s)?(<[a-z\.]+>\s)?
30 |     # ((?P<data>(\"|\').*?(\"$|\'$|>$))?
31 |     # (?P<blob>(\{).+?(>$|\}$))?)?
32 |     # """, re.VERBOSE | re.DOTALL | re.MULTILINE)
33 |     #
34 |     # _re_gdb_var_info_comma_sep = re.compile( r"""
35 |     # (?P<var_name>(\$)?[A-Za-z0-9_?]+)
36 |     # \s=\s
37 |     # (\(.+?\)\s)?
38 |     # (?P<value>((@)?0x[A-Fa-f0-9]+)?([0-9\-]+)?(\s)?(<[a-z\s]+>)?([A-Za-z0-9_?]+)?)(:)?
39 |     # (<[a-z\.]+>)?((\s)?
40 |     # (?P<data>(\"|\').*?(\"|\'|>))((?=([.]+)?,\s[^"^'])|\}|$))?((\s)?
41 |     # (?P<blob>(\{).+?([\"\'\}]+))(?=;\s|$))?
42 |     # """, re.VERBOSE | re.DOTALL)
43 |     #
44 |     # _re_gdb_type_info = re.compile( r"""
45 |     # ^type\s=\s
46 |     # (?P<type>(struct\s|class\s)?(([A-Za-z0-9_<>, ]+::)+)?[A-Za-z0-9\_ ]+) # Optional Namespace and Base type
47 |     # (\s)?(.+\}(\s)?)?((?P<modifier>[0-9\[\]\*\&]+))? # Type modifier (Array, Ptr, Reference)
48 |     # """, re.VERBOSE | re.DOTALL);
49 |     #
50 |     # _re_gdb_blob_normalize = re.compile(r"""
51 |     # (\"|\')(?P<data>.+?)(\",\s|\"$|\'\s)([0-9]+)\stimes>)?
52 |     # """, re.VERBOSE | re.DOTALL)
53 |
54 |     _re_gdb_bt = re.compile(r"""
55 |     ^\s*\#(?P<frame_no>[0-9]+)\s*
56 |     (?P<address>0x[A-Fa-f0-9]+)\s
57 |     (in\s)?((?P<func>[A-Za-z0-9_:\?<>,\s]+)
58 |     ((?P<paramlist>\([A-Za-z0-9_:\&\,\s\*]*\)))?\s)?
59 |     ((?P<file>.+?):(?P<line>[0-9]+)(:(?P<column>[0-9]+))?)?
60 |     ((\s)?\((?P<module>.+?)\+(?P<offset>0x[A-Fa-f0-9]+)\))?
61 |     """, re.MULTILINE | re.VERBOSE)
62 |
63 |     _re_gdb_signal = re.compile(r""".*Program received signal (?P<sig>\w+),.*""", re.DOTALL)
64 |     _re_exp_class = re.compile(r""".*Exploitability Classification: (?P<classification>\w+)\s*""", re.DOTALL)
65 |     _re_exp_others = re.compile(r""".*Other tags: (?P<tags>[A-Za-z0-9()/\s]+)\nFaulting.*""", re.DOTALL)
66 |     _re_fault_info = re.compile(r"""
67 |     Faulting mem location is (?P<faddr>0x[A-Fa-f0-9]+), # Fault mem addr
68 |     pc is (?P<pc>0x[A-Fa-f0-9]+), # Faulting PC
69 |     esp is (?P<esp>0x[A-Fa-f0-9]+), # Stack pointer
70 |     ebp is (?P<ebp>0x[A-Fa-f0-9]+) # Base pointer
71 |     """, re.VERBOSE)
72 |
73 |     def __init__(self, program, params, jsonfile):
74 |         '''
75 |         Constructor
76 |         '''
77 |         self._pid = int(0)
78 |         self._cmd_line = ""
79 |         self.jsonfile = jsonfile
80 |
81 |         '''
82 |         Requires ~/.gdbinit to have something like this (basically rc0r's exploitable patch + some scripted commands):
83 |         set auto-load safe-path /
84 |         define hook-quit
85 |         set confirm off
86 |         end
87 |         define printfault
88 |         printf "Faulting mem location is %#lx, pc is %#lx, esp is %#x, ebp is %#x\n", $_siginfo._sifields._sigfault.si_addr, $pc, $esp, $ebp
89 |         end
90 |         source /home/users/bshastry/.local/lib/python3.5/site-packages/exploitable-1.32_rcor-py3.5.egg/exploitable/exploitable.py
91 |         set pagination off
92 |         '''
93 |
94 |         self.p = subprocess.Popen(['gdb', '-q', '-ex=set args {}'.format(params), '-ex=r',
95 |                                    '-ex=exploitable', '-ex=printfault', '-ex=bt', '-ex=quit', program],
96 |                                   stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
97 |         # fl = fcntl.fcntl(self.p.stdout, fcntl.F_GETFL)
98 |         # fcntl.fcntl(self.p.stdout, fcntl.F_SETFL, fl | os.O_NONBLOCK)
99 |
100 |     def run(self):
101 |         # FIXME: Error handling if necessary
102 |         diag_report = self.p.communicate()[0]
103 |         match = self._re_gdb_signal.match(diag_report)
104 |         if match is not None:
105 |             self._signal = match.group("sig")
106 |         match = self._re_exp_class.search(diag_report)
107 |         if match is not None:
108 |             self._classification = match.group("classification")
109 |         match = self._re_exp_others.search(diag_report)
110 |         if match is not None:
111 |             self._tags = match.group("tags")
112 |         match = self._re_fault_info.match(diag_report)
113 |         if match is not None:
114 |             self._faultaddr = match.group("faddr")
115 |             self._faultpc = match.group("pc")
116 |             self._esp = match.group("esp")
117 |             self._ebp = match.group("ebp")
118 |         # Backtrace of fault
119 |         # start = diag_report.find(" #")
120 |         # end = 0
121 |         # for line in diag_report[start:].splitlines():
122 |         #     if line.startswith(" #"):
123 |         #         end = diag_report.find(line)
124 |         #         end += diag_report[end:].find("\n")
125 |         #     else:
126 |         #         break
127 |
128 |         self._fault_bt = {}
129 |         for match in self._re_gdb_bt.finditer(diag_report):
130 |             frame_no, address, func, paramlist, filename, line, column, module, offset = match.group("frame_no",
131 |                                                                                                      "address", "func",
132 |                                                                                                      "paramlist",
133 |                                                                                                      "file", "line",
134 |                                                                                                      "column", "module",
135 |                                                                                                      "offset")
136 |             frame_str = "frame{}".format(frame_no)
137 |
138 |             self._fault_bt[frame_str] = {"frame_no": frame_no,
139 |                                          "address": address,
140 |                                          "function": func,
141 |                                          "func_params": paramlist,
142 |                                          }
143 |             if filename and line and column:
144 |                 self._fault_bt[frame_str]['file'] = filename
145 |                 self._fault_bt[frame_str]['line'] = line
146 |                 self._fault_bt[frame_str]['column'] = column
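# Quick illustration of _re_gdb_bt above (the frame line is a made-up but
# representative gdb backtrace entry; groups that are not present stay None):
#
#     m = GdbExtractor._re_gdb_bt.search(' #0  0x080486cf in foo () at src/main.c:42')
#     m.group('frame_no'), m.group('address')  # -> ('0', '0x080486cf')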
147 |             if module and offset:
148 |                 self._fault_bt[frame_str]['module'] = module
149 |                 self._fault_bt[frame_str]['offset'] = offset
150 |         self.jsonify()
151 |         return True
152 |
153 |     def serialize(self):
154 |         serial_dict = {}
155 |         for key, value in self.__dict__.iteritems():
156 |             attr = getattr(self, key)
157 |             if not key.startswith('__') and not callable(attr) and not type(attr) is staticmethod:
158 |                 if type(value) is str or type(value) is int:
159 |                     serial_dict[key] = value
160 |                 if type(value) is dict and key == '_fault_bt':
161 |                     serial_dict[key] = value
162 |         return serial_dict
163 |
164 |     def jsonify(self):
165 |         with open(self.jsonfile, 'w') as file:
166 |             json.dump(self.serialize(), file, indent=4)
167 |
168 |     def _read_one_shot(self, blocking = 0.09):
169 |         with open('.orthrus_gdbout', 'r') as fp:
170 |             return fp.read()
171 |
172 |         # buf = ''
173 |         # try:
174 |         #     buf = self.p.stdout.read()
175 |         # except IOError:
176 |         #     time.sleep(blocking)
177 |         #     self.p.stdout.flush()
178 |         # finally:
179 |         #     if not buf:
180 |         #         buf = self.p.stdout.read()
181 |         #     return buf
182 |
183 |     # def _send(self, cmd):
184 |     #     self.p.stdin.write(cmd + '\n')
185 |     #     self.p.stdin.flush()
186 |     #
187 |     # def _readAll(self):
188 |     #     blocking = 0.001
189 |     #     return self._read(blocking)
190 |     #
191 |     # def _read(self, blocking = 0):
192 |     #     buf = ''
193 |     #     while True:
194 |     #         try:
195 |     #             buf += self.p.stdout.read()
196 |     #         except IOError:
197 |     #             time.sleep(blocking)
198 |     #             self.p.stdout.flush()
199 |     #         if buf.find("(gdb)") > -1:
200 |     #             break
201 |     #     return buf[0:buf.find("(gdb)") - 1]
202 |     #
203 |     # def _selectFrameByName(self, function):
204 |     #     if function == None:
205 |     #         return False
206 |     #     self._send("frame 0")
207 |     #     while True:
208 |     #         data = self._readAll()
209 |     #         if "you cannot go up." in data:
210 |     #             return False
211 |     #         if " " + function + " (" in data:
212 |     #             return True
213 |     #         if " __interceptor_" + function + " (" in data:
214 |     #             return True
215 |     #         self._send("up")
216 |     #
217 |     #     return False
218 |     #
219 |     # def _getTypeForVar(self, var_name):
220 |     #     vartype = ""
221 |     #     self._send("ptype " + var_name)
222 |     #     typeinfo = self._readAll()
223 |     #     if "You can't do that without a process to debug." in typeinfo:
224 |     #         return ""
225 |     #
226 |     #     match = self._re_gdb_type_info.search(typeinfo)
227 |     #     if match:
228 |     #         vartype = match.group("type")
229 |     #         if vartype:
230 |     #             vartype = vartype.rstrip(" ")
231 |     #             modifier = match.group("modifier")
232 |     #             if modifier != None:
233 |     #                 vartype += " " + modifier
234 |     #
235 |     #     return vartype
236 |     #
237 |     # def _getInfoForVar(self, var_name, ptrBaseType, depth = 1):
238 |     #     if depth == 0:
239 |     #         return ("", "")
240 |     #
241 |     #     cmd = ""
242 |     #     if ptrBaseType:
243 |     #         cmd = "p *"
244 |     #     else:
245 |     #         cmd = "p "
246 |     #
247 |     #     self._send(cmd + var_name)
248 |     #     rawdata = self._readAll()
249 |     #     if rawdata.startswith("Attempt to dereference a generic pointer."):
250 |     #         return ("", "")
251 |     #     if rawdata.startswith("Cannot access memory at address"):
252 |     #         return ("", "")
253 |     #     if rawdata.startswith("No symbol \"operator*\""):
254 |     #         return ("", "")
255 |     #     if rawdata.startswith("value has been optimized out"):
256 |     #         return ("", "")
257 |     #
258 |     #     tmp = ""
259 |     #     for line in rawdata.splitlines(False):
260 |     #         tmp += line.lstrip(" ")
261 |     #     rawdata = tmp
262 |     #
263 |     #     match = self._re_gdb_var_info.search(rawdata)
264 |     #     value, blob = match.group("value", "blob")
265 |     #
266 |     #     if not blob:
267 |     #         return ("", (value or ""))
268 |     #
269 |     #     blob_copy = blob
270 |     #     blob = self._insertBlobTerminator(blob)
271 |     #     offset = 0
272 |     #
273 |     #     data_dict = OrderedDict()
274 |     #     for mat in self._re_gdb_var_info_comma_sep.finditer(blob):
275 |     #         var, val, da, bl = mat.group("var_name", "value", "data", "blob")
276 |     #         var_end = mat.end("var_name")
277 |     #
278 |     #         ty = self._getTypeForVar(var_name + "." + var)
279 |     #         ty = "<" + ty + "> "
280 |     #         blob_copy = blob_copy[:offset + var_end + 3] + ty + blob_copy[offset + var_end + 3:]
281 |     #         offset += len(ty)
282 |     #
283 |     #         if da and isinstance(da, str) and (" 0:
336 |     #                 normalized += data
337 |     #                 repeat -= 1
338 |     #         else:
339 |     #             normalized += data
340 |     #
341 |     #     return normalized + "\""
342 |     #
343 |     # def getSymbolsInSourceLineForPc(self, pc):
344 |     #     sym = list()
345 |     #     re_vars = re.compile("(?P<symbol>[a-zA-Z0-9_]+)")
346 |     #     self._send("list *" + pc + ",*" + pc)
347 |     #     rawdata = self._readAll().splitlines()[1]
348 |     #     rawdata = rawdata[rawdata.find(" "):].lstrip(" ")
349 |     #
350 |     #     for match in re_vars.finditer(rawdata):
351 |     #         sym.append(match.group("symbol"))
352 |     #
353 |     #     return sym
354 |     #
355 |     # def getArglistByFuncName(self, function):
356 |     #     args = OrderedDict()
357 |     #     if self._selectFrameByName(function) == False:
358 |     #         return OrderedDict()
359 |     #
360 |     #     self._send("info args")
361 |     #     rawdata = self._readAll()
362 |     #     if "No arguments" in rawdata:
363 |     #         return OrderedDict()
364 |     #
365 |     #     for match in self._re_gdb_var_info.finditer(rawdata):
366 |     #         var_name, value, data, blob = match.group("var_name", "value", "data", "blob")
367 |     #         vartype = self._getTypeForVar(var_name)
368 |     #
369 |     #         # Try to dereference pointer to extract more information
370 |     #         if not blob and not data and ("0x" in value) and ("*" in vartype):
371 |     #             blob, data = self._getInfoForVar(var_name, True, 5)
372 |     #
373 |     #         if blob and (not value or "@" in value) and ("*" not in vartype):
374 |     #             blob, data = self._getInfoForVar(var_name, False, 5)
375 |     #
376 |     #         if data and isinstance(data, str) and (" <file>[^:]+):(?P<line>\d+)\s" \
27 |     #                                    "(?P<function>[\w|\-|\:]+)$", re.MULTILINE)
28 |     #
29 |     line_cov_regex = re.compile(r"^(?P<function>[\w|\-|\:]+)$\n"
30 |                                 r"^(?P<file>[^:]+):(?P<line>\d+):(?P<col>\d+)$",
31 |                                 re.MULTILINE)
32 |
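# Illustrative check of line_cov_regex (not part of the original module):
# the two-line sample mirrors llvm-symbolizer output, i.e. a demangled
# function name line followed by file:line:col. findall yields tuples in
# group order (function, file, line, col):
#
#     re.findall(line_cov_regex, "main\n/src/main.c:10:5")
#     # -> [('main', '/src/main.c', '10', '5')]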
33 |     # Is_Crash_Regex = re.compile(r"id.*,(sig:\d{2}),.*")
34 |     # find_crash_parent_regex = re.compile(r"^(HARDEN\-|ASAN\-)?(?P<session>[\w|\-]+):id.*?"
35 |     #                                      r"(sync:(?P<syncname>[\w|\-]+))?,src:(?P<src_id>\d+).*$")
36 |
37 |
38 |     # This regex merges legacy Is_Crash_Regex and namesake
39 |     # old_find_crash_parent_regex = re.compile(r"^(HARDEN\-|ASAN\-)?((?P<session>[\w|\-]+):)?id:\d+,sig:\d+,"
40 |     #                                          r"(sync:(?P<syncname>[\w|\-]+),)?src:(?P<src_id>\d+).*$")
41 |     find_queue_parent_regex = re.compile(r"id:\d+,(sync:(?P<syncname>[\w|\-]+),)?src:(?P<src_id>\d+).*$")
42 |
43 |     # For proposed successor of current naming convention
44 |     find_crash_parent_regex = re.compile(r"^(HARDEN:|ASAN:)?((?P<session>[\w|\-]+):)?id:\d+,sig:\d+,"
45 |                                          r"(sync:(?P<syncname>[\w|\-]+),)?src:(?P<src_id>\d+).*$")
46 |
47 |
48 |     def __init__(self, parsed_args, cov_cmd, bin_path, crash_dir, afl_out, sanitizer):
49 |
50 |         self.args = parsed_args
51 |         self.coverage_cmd = cov_cmd
52 |         self.bin_path = bin_path
53 |         self.crash_dir = crash_dir
54 |         self.afl_fuzzing_dir = afl_out
55 |         self.sanitizer = sanitizer
56 |
57 |         self.cov_paths = {}
58 |
59 |         ### global coverage tracking dictionary
60 |         self.global_pos_report = set()
61 |         self.global_zero_report = set()
62 |
63 |         ### For diffs between two consecutive queue files
64 |         self.curr_pos_report = set()
65 |         self.curr_zero_report = set()
66 |         self.prev_pos_report = set()
67 |         self.prev_zero_report = set()
68 |
69 |         ### For use in dd-mode
70 |         self.crashdd_pos_report = set()
71 |         # self.crashdd_zero_report = set()
72 |         self.parentdd_pos_report = set()
73 |         # self.parentdd_zero_report = set()
74 |
75 |         ### List of all tuples singularly in crash positive reports
76 |         self.crashdd_pos_list = []
77 |
78 |     def setup_parsing(self):
79 |         self.bin_name = os.path.basename(self.bin_path)
80 |         self.sancov_filename_regex = re.compile(r"%s.\d+.sancov" % self.bin_name)
81 |
82 |     def run(self):
83 |         if self.args.version:
84 |             print "afl-sancov-" + self.Version
85 |             return 0
86 |
87 |         if not self.validate_args():
88 |             return 1
89 |
90 |         if not self.init_tracking():
91 |             return 1
92 |
93 |         self.setup_parsing()
94 |
95 |         if self.args.dd_num == 1:
96 |             rv = self.process_afl_crashes()
97 |         else:
98 |             rv = self.process_afl_crashes_deep()
99 |
100 |         return not rv
101 |
102 |     def join_tuples_in_list(self, list):
103 |         for idx, tpl in enumerate(list):
104 |             list[idx] = ':'.join(str(val) for val in tpl)
105 |         return
106 |
107 |     def jsonify_slice(self, slice_list, crashfile, jsonfilename):
108 |         dict = {"crashing-input": crashfile, "slice-node-spec": []}
109 |         self.join_tuples_in_list(slice_list)
110 |         counter = collections.Counter(slice_list)
111 |         sorted_list = counter.most_common()
112 |         for tpl in sorted_list:
113 |             dict['slice-node-spec'].append({'line': tpl[0], 'count': tpl[1]})
114 |         dict['slice-linecount'] = len(sorted_list)
115 |
116 |         self.jsonify_dict(jsonfilename, dict)
117 |
118 |     def jsonify_dice(self, crashfile, jsonfilename, parentfile=None):
119 |
120 |         if parentfile:
121 |             dict = {"crashing-input": crashfile, "parent-input": parentfile, "diff-node-spec": []}
122 |         else:
123 |             dict = {"crashing-input": crashfile, "diff-node-spec": []}
124 |
125 |         self.join_tuples_in_list(self.crashdd_pos_list)
126 |
127 |         counter = collections.Counter(self.crashdd_pos_list)
128 |
129 |         sorted_list = counter.most_common()
130 |         for tpl in sorted_list:
131 |             dict['diff-node-spec'].append({'line': tpl[0], 'count': tpl[1]})
132 |
133 |         # self.prev_pos_report contains crash file's exec slice
134 |         slice_linecount = len(self.prev_pos_report)
135 |         dice_linecount = len(sorted_list)
136 |
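# Worked example of the shrink metric stored below (hypothetical counts):
# a 200-line crash slice whose dice retains 30 lines shrinks by 85 percent,
# since 100 - (30 / 200.0) * 100 == 85.0.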
137 |         dict['slice-linecount'] = slice_linecount
138 |         dict['dice-linecount'] = dice_linecount
139 |         dict['shrink-percent'] = 100 - (float(dice_linecount)/slice_linecount)*100
140 |
141 |         self.jsonify_dict(jsonfilename, dict)
142 |
143 |         return
144 |
145 |     def jsonify_dict(self, filename, dict):
146 |         with open(filename, "w") as file:
147 |             json.dump(dict, file, indent=4)
148 |
149 |     def write_dice_as_json(self, cbasename, pbasename=None):
150 |         crashdd_outfile = self.cov_paths['dice_dir'] + '/' + cbasename + '.json'
151 |
152 |         # header = "diff crash ({}) -> parent ({})".format(cbasename, pbasename)
153 |         # self.write_file(header, crashdd_outfile)
154 |         if pbasename:
155 |             self.jsonify_dice(cbasename, crashdd_outfile, pbasename)
156 |         else:
157 |             self.jsonify_dice(cbasename, crashdd_outfile)
158 |
159 |         ## Reset state to be safe
160 |         self.crashdd_pos_list = []
161 |
162 |     def cleanup(self):
163 |         ### Stash away all raw sancov files
164 |         stash_dst = self.cov_paths['dd_stash_dir']
165 |         if os.path.isdir(stash_dst):
166 |             for file in sorted(glob.glob(self.cov_paths['dice_dir'] + '/*.sancov')):
167 |                 os.rename(file, stash_dst + '/' + os.path.basename(file))
168 |
169 |         # Remove covered.txt
170 |         covered = self.cov_paths['dice_dir'] + '/covered.txt'
171 |         if os.path.isfile(covered):
172 |             os.remove(covered)
173 |
174 |     def parent_identical_or_crashes(self, crash, parent):
175 |
176 |         # Base names
177 |         cbasename = os.path.basename(crash)
178 |         pbasename = os.path.basename(parent)
179 |
180 |         ## Filter queue filenames with sig info
181 |         if self.find_crash_parent_regex.match(pbasename):
182 |             self.logr("Parent ({}) looks like crashing input!".format(pbasename))
183 |             return True
184 |
185 |         try:
186 |             diff_out = subprocess.check_output("diff -q {} {}".format(crash, parent),
187 |                                                stderr=subprocess.STDOUT, shell=True)
188 |         except Exception, e:
189 |             diff_out = e.output
190 |
191 |         if not diff_out.rstrip("\n"):
192 |             self.logr("Crash file ({}) and parent ({}) are identical!"
193 |                       .format(cbasename, pbasename))
194 |             return True
195 |
196 |         cov_cmd = self.coverage_cmd.replace('AFL_FILE', parent)
197 |
198 |         ### Dry-run to make sure parent doesn't cause a crash
199 |         if self.does_dry_run_throw_error(cov_cmd):
200 |             self.logr("Parent ({}) crashes binary!".format(pbasename))
201 |             return True
202 |
203 |         return False
204 |
205 |     def generate_cov_for_parent(self, parent_fname):
206 |         pbasename = os.path.basename(parent_fname)
207 |
208 |         #### The output should be written to delta-diff dir
209 |         #### as the afl_input's namesake with a sancov extension
210 |         ### raw sancov file
211 |         self.cov_paths['parent_sancov_raw'] = self.cov_paths['dice_dir'] + \
212 |                                               '/' + pbasename + '.sancov'
213 |         self.cov_paths['parent_afl'] = pbasename
214 |
215 |         cov_cmd = self.coverage_cmd.replace('AFL_FILE', parent_fname)
216 |         ### execute the command to generate code coverage stats
217 |         ### for the current AFL test case file
218 |         sancov_env = self.get_sancov_env(self.cov_paths['parent_sancov_raw'], pbasename)
219 |
220 |         self.run_cmd(cov_cmd, self.No_Output, sancov_env)
221 |
222 |         if self.args.sancov_bug:
223 |             sancovfile = "".join(glob.glob("*.sancov"))
224 |             cov_cmd = 'mv {} {}'.format(sancovfile, self.cov_paths['dice_dir'])
225 |             self.run_cmd(cov_cmd, self.No_Output)
226 |
227 |         # This renames default sancov file to specified filename
228 |         # and populates self.curr* report with non-crashing input's
229 |         # linecov info.
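# For reference (illustrative): after rename_and_extract_linecov succeeds,
# self.curr_pos_report holds (file, function, line, col) string tuples, e.g.
#
#     set([('/src/main.c', 'main', '10', '5')])
#
# as produced by linecov_report further below.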
230 | if not self.rename_and_extract_linecov(self.cov_paths['parent_sancov_raw']): 231 | self.logr("Error generating cov info for parent {}".format(pbasename)) 232 | return False 233 | 234 | return True 235 | 236 | def generate_cov_for_crash(self, crash_fname): 237 | 238 | cbasename = os.path.basename(crash_fname) 239 | 240 | self.cov_paths['crash_sancov_raw'] = self.cov_paths['dice_dir'] + \ 241 | '/' + cbasename + '.sancov' 242 | 243 | self.cov_paths['crash_afl'] = cbasename 244 | 245 | ### Make sure crashing input indeed triggers a program crash 246 | cov_cmd = self.coverage_cmd.replace('AFL_FILE', crash_fname) 247 | 248 | if not self.does_dry_run_throw_error(cov_cmd): 249 | self.logr("Crash input ({}) does not crash the program! Filtering crash file." 250 | .format(cbasename)) 251 | os.rename(crash_fname, self.cov_paths['dd_filter_dir'] + '/' + cbasename) 252 | return False 253 | 254 | ### execute the command to generate code coverage stats 255 | ### for the current AFL test case file 256 | sancov_env = self.get_sancov_env(self.cov_paths['crash_sancov_raw'], cbasename) 257 | 258 | self.run_cmd(cov_cmd, self.No_Output, sancov_env) 259 | 260 | if self.args.sancov_bug: 261 | rawfilename = "".join(glob.glob("*.sancov.raw")) 262 | mapfilename = "".join(glob.glob("*.sancov.map")) 263 | 264 | cov_cmd = 'mv {} {} {}'.format(rawfilename, mapfilename, 265 | self.cov_paths['dice_dir']) 266 | self.run_cmd(cov_cmd, self.No_Output) 267 | 268 | globstrraw = os.path.basename("".join(glob.glob(self.cov_paths['dice_dir'] + "/*.sancov.raw"))) 269 | globstrmap = os.path.basename("".join(glob.glob(self.cov_paths['dice_dir'] + "/*.sancov.map"))) 270 | ### Run pysancov rawunpack before calling rename 271 | self.run_cmd("cd {}; pysancov rawunpack {} ; rm {} {}".format(self.cov_paths['dice_dir'], 272 | globstrraw, globstrraw, globstrmap), 273 | self.No_Output) 274 | # self.run_cmd("cd pysancov rawunpack " + globstrraw + " ; rm " + globstrraw + " " + globstrmap, self.No_Output) 275 | 276 | # This renames default sancov file to specified filename 277 | # and populates self.curr* report with non-crashing input's 278 | # linecov info. 279 | if not self.rename_and_extract_linecov(self.cov_paths['crash_sancov_raw']): 280 | self.logr("Error generating coverage info for crash file {}".format(cbasename)) 281 | return False 282 | 283 | return True 284 | 285 | def process_afl_crashes_deep(self): 286 | 287 | ''' 288 | 1. Process crash file 289 | 2. Pick and process crash file's parent and N other randomly selected queue files 290 | 3. 
Do a repeated intersection of s.difference(t) 291 | :return: 292 | ''' 293 | 294 | crash_files = import_unique_crashes(self.crash_dir) 295 | num_crash_files = len(crash_files) 296 | 297 | self.logr("\n*** Imported %d new crash files from: %s\n" \ 298 | % (num_crash_files, (self.afl_fuzzing_dir + '/unique'))) 299 | 300 | if not self.import_afl_dirs(): 301 | return False 302 | 303 | fuzzdirs = self.cov_paths['dirs'].keys() 304 | queue_files = [] 305 | for val in fuzzdirs: 306 | queue_files.extend(import_test_cases(val + '/queue')) 307 | 308 | crash_file_counter = 0 309 | 310 | for crash_fname in crash_files: 311 | 312 | crash_file_counter += 1 313 | self.logr("[+] Processing crash file ({}/{})".format(crash_file_counter, num_crash_files)) 314 | 315 | cbasename = os.path.basename(crash_fname) 316 | 317 | if not self.generate_cov_for_crash(crash_fname): 318 | continue 319 | 320 | # Store this in self.prev_pos_report 321 | self.prev_pos_report = self.curr_pos_report 322 | 323 | queue_cnt = 0 324 | # Find parent 325 | pname = self.find_parent_crashing(crash_fname) 326 | 327 | while queue_cnt < self.args.dd_num: 328 | 329 | if queue_cnt > 0: 330 | pname = self.find_queue_parent(pname) 331 | if not pname: 332 | self.logr("Cannot find ancestors of crash file {}. Bailing out".format(cbasename)) 333 | break 334 | 335 | while pname and self.parent_identical_or_crashes(crash_fname, pname): 336 | self.logr("Looking up ancestors of crash file {}".format(cbasename)) 337 | pname = self.find_queue_parent(pname) 338 | 339 | if not pname: 340 | self.logr("Cannot find ancestors of crash file {}. Bailing out".format(cbasename)) 341 | break 342 | 343 | # Select a random queue file 344 | # pname = random.choice(queue_files) 345 | 346 | # if self.parent_identical_or_crashes(crash_fname, pname): 347 | # self.logr("Skipping parent of crash file {}".format(cbasename)) 348 | # continue 349 | 350 | if not self.generate_cov_for_parent(pname): 351 | self.logr("Error generating cov info for parent of {}".format(cbasename)) 352 | continue 353 | 354 | # Increment queue_cnt 355 | queue_cnt += 1 356 | self.logr("Processing parent {}/{}".format(queue_cnt, self.args.dd_num)) 357 | 358 | # Obtain Pc.difference(Pnc) and write to file 359 | self.crashdd_pos_report = self.prev_pos_report.difference(self.curr_pos_report) 360 | self.crashdd_pos_report = sorted(self.crashdd_pos_report, \ 361 | key=lambda cov_entry: (cov_entry[0], cov_entry[2], cov_entry[3])) 362 | 363 | # Extend the global list with current crash delta diff 364 | self.crashdd_pos_list.extend(self.crashdd_pos_report) 365 | 366 | self.write_dice_as_json(cbasename) 367 | # Write Crash coverage to slice dir 368 | self.jsonify_slice(list(self.prev_pos_report), cbasename, 369 | self.cov_paths['slice_dir'] + '/' + cbasename + '.json') 370 | 371 | self.cleanup() 372 | return True 373 | 374 | def process_afl_crashes(self): 375 | 376 | ''' 377 | 1. Process crash file 378 | 2. Pick and process crash file's parent 379 | 3. 
Do a s.difference(t) 380 | :return: 381 | ''' 382 | 383 | crash_files = import_unique_crashes(self.crash_dir) 384 | num_crash_files = len(crash_files) 385 | 386 | self.logr("\n*** Imported %d new crash files from: %s\n" \ 387 | % (num_crash_files, (self.afl_fuzzing_dir + '/unique'))) 388 | 389 | crash_file_counter = 0 390 | 391 | for crash_fname in crash_files: 392 | 393 | crash_file_counter += 1 394 | self.logr("[+] Processing crash file ({}/{})".format(crash_file_counter, num_crash_files)) 395 | 396 | # Find parent 397 | pname = self.find_parent_crashing(crash_fname) 398 | cbasename = os.path.basename(crash_fname) 399 | 400 | ### AFL corpus sometimes contains parent file that is identical to crash file 401 | ### or a parent (in queue) that also crashes the program. In case we bump into 402 | ### such parents, we try to recursively find their parent i.e., the crash file's 403 | ### ancestor. 404 | while pname and self.parent_identical_or_crashes(crash_fname, pname): 405 | self.logr("Looking up ancestors of crash file {}".format(cbasename)) 406 | pname = self.find_queue_parent(pname) 407 | 408 | if not pname: 409 | self.logr("No non-identical parent of crash file {} found. Bailing out!".format(cbasename)) 410 | continue 411 | 412 | pbasename = os.path.basename(pname) 413 | 414 | if not self.generate_cov_for_parent(pname): 415 | self.logr("Error generating cov info for parent of {}".format(cbasename)) 416 | continue 417 | 418 | self.prev_pos_report = self.curr_pos_report 419 | self.prev_zero_report = self.curr_zero_report 420 | 421 | if not self.generate_cov_for_crash(crash_fname): 422 | continue 423 | 424 | # Obtain Pc.difference(Pnc) and write to file 425 | self.crashdd_pos_report = self.curr_pos_report.difference(self.prev_pos_report) 426 | 427 | self.crashdd_pos_list = sorted(self.crashdd_pos_report, \ 428 | key=lambda cov_entry: (cov_entry[0], cov_entry[2], cov_entry[3])) 429 | 430 | self.write_dice_as_json(cbasename, pbasename) 431 | 432 | # Write Crash coverage to slice dir 433 | self.jsonify_slice(list(self.curr_pos_report), cbasename, 434 | self.cov_paths['slice_dir'] + '/' + cbasename + '.json') 435 | 436 | self.cleanup() 437 | return True 438 | 439 | def get_parent(self, filepath, isCrash=True): 440 | 441 | dirname, basename = os.path.split(filepath) 442 | 443 | if isCrash: 444 | match = self.find_crash_parent_regex.match(basename) 445 | # (_, _, session, _, syncname, src_id) = match.groups() 446 | (_, _, session, _, syncname, src_id) = match.groups() 447 | 448 | searchdir = self.afl_fuzzing_dir 449 | # if syncname: 450 | # searchdir += '/' + syncname + '/queue' 451 | if session: 452 | searchdir += '/' + session + '/queue' 453 | else: 454 | assert False, "Parent of crash file {} cannot be found".format(basename) 455 | 456 | 457 | else: 458 | match = self.find_queue_parent_regex.match(basename) 459 | if not match: 460 | self.logr("No parent could be found for {}".format(basename)) 461 | return None 462 | 463 | (_, syncname, src_id) = match.groups() 464 | 465 | searchdir = dirname 466 | 467 | if syncname: 468 | searchdir += '/../../' + syncname + '/queue' 469 | 470 | 471 | search_cmd = "find " + searchdir + " -maxdepth 1" + " -name id:" + src_id + "*" 472 | 473 | parent_fname = subprocess.check_output(search_cmd, stderr=subprocess.STDOUT, shell=True) 474 | 475 | parent_list = filter(None, parent_fname.split("\n")) 476 | if (len(parent_list) == 0): 477 | self.logr("No parents found for file {}".format(basename)) 478 | return None 479 | 480 | if (len(parent_list) > 1): 481 | 
self.logr("Multiple parents found for file {}. Selecting first.".format(basename)) 482 | 483 | return os.path.abspath(parent_list[0].rstrip("\n")) 484 | 485 | def find_queue_parent(self, queue_fname): 486 | return self.get_parent(queue_fname, False) 487 | 488 | def find_parent_crashing(self, crash_fname): 489 | return self.get_parent(crash_fname) 490 | 491 | def init_tracking(self): 492 | 493 | self.cov_paths['top_dir'] = self.afl_fuzzing_dir + '/../crash-analysis/spectrum/{}'.format(self.sanitizer) 494 | self.cov_paths['log_file'] = self.cov_paths['top_dir'] + '/afl-sancov.log' 495 | self.cov_paths['tmp_out'] = self.cov_paths['top_dir'] + '/cmd-out.tmp' 496 | 497 | ### global coverage results 498 | self.cov_paths['zero_cov'] = self.cov_paths['top_dir'] + '/zero-cov' 499 | self.cov_paths['pos_cov'] = self.cov_paths['top_dir'] + '/pos-cov' 500 | 501 | self.cov_paths['dirs'] = {} 502 | self.cov_paths['parent_afl'] = '' 503 | self.cov_paths['crash_afl'] = '' 504 | self.cov_paths['parent_sancov_raw'] = '' 505 | self.cov_paths['crash_sancov_raw'] = '' 506 | # Diff in delta debug mode 507 | self.cov_paths['slice_dir'] = self.cov_paths['top_dir'] + '/slice' 508 | self.cov_paths['dice_dir'] = self.cov_paths['top_dir'] + '/dice' 509 | self.cov_paths['dd_stash_dir'] = self.cov_paths['dice_dir'] + '/.raw' 510 | self.cov_paths['dd_filter_dir'] = self.cov_paths['dice_dir'] + '/.filter' 511 | self.cov_paths['dd_final_stats'] = self.cov_paths['dice_dir'] + '/final_stats.dd' 512 | 513 | if self.args.overwrite: 514 | self.init_mkdirs() 515 | else: 516 | if self.is_dir(self.cov_paths['top_dir']): 517 | print "[*] Existing coverage dir %s found, use --overwrite to " \ 518 | "re-calculate coverage" % (self.cov_paths['top_dir']) 519 | return False 520 | else: 521 | self.init_mkdirs() 522 | 523 | self.write_status(self.cov_paths['top_dir'] + '/afl-sancov-status') 524 | return True 525 | 526 | def import_afl_dirs(self): 527 | 528 | if not self.afl_fuzzing_dir: 529 | print "[*] Must specify AFL fuzzing dir with --afl-fuzzing-dir or -d" 530 | return False 531 | 532 | assert 'top_dir' in self.cov_paths, "Trying to import fuzzing data without sancov dir" 533 | 534 | def_dir = self.afl_fuzzing_dir 535 | 536 | if self.is_dir(def_dir + '/queue'): 537 | if def_dir not in self.cov_paths['dirs']: 538 | self.add_fuzz_dir(def_dir) 539 | else: 540 | for p in os.listdir(def_dir): 541 | fuzz_dir = def_dir + '/' + p 542 | if self.is_dir(fuzz_dir): 543 | if self.is_dir(fuzz_dir + '/queue'): 544 | ### found an AFL fuzzing directory instance 545 | if fuzz_dir not in self.cov_paths['dirs']: 546 | self.add_fuzz_dir(fuzz_dir) 547 | 548 | return True 549 | 550 | def get_sancov_env(self, sancov_output, afl_input): 551 | 552 | fpath, fname = os.path.split(sancov_output) 553 | 554 | sancov_env = os.environ.copy() 555 | if self.sanitizer == "asan": 556 | if self.find_crash_parent_regex.match(afl_input): 557 | if not self.args.sancov_bug: 558 | spectrum_asan_options(sancov_env, 'coverage=1:coverage_direct=1:coverage_dir={}'.format(fpath)) 559 | else: 560 | spectrum_asan_options(sancov_env, 'coverage=1:coverage_direct=1') 561 | else: 562 | if not self.args.sancov_bug: 563 | spectrum_asan_options(sancov_env, 'coverage=1:coverage_dir={}'.format(fpath)) 564 | else: 565 | spectrum_asan_options(sancov_env, 'coverage=1') 566 | else: 567 | if self.find_crash_parent_regex.match(afl_input): 568 | if not self.args.sancov_bug: 569 | sancov_env['UBSAN_OPTIONS'] = 'coverage=1:coverage_direct=1:' \ 570 | 'coverage_dir=%s' % fpath 571 | else: 572 | 
sancov_env['UBSAN_OPTIONS'] = 'coverage=1:coverage_direct=1'
573 |         else:
574 |             if not self.args.sancov_bug:
575 |                 sancov_env['UBSAN_OPTIONS'] = 'coverage=1:coverage_dir=%s' % fpath
576 |             else:
577 |                 sancov_env['UBSAN_OPTIONS'] = 'coverage=1'
578 |
579 |         return sancov_env
580 |
581 |     # Rename <binary>.<pid>.sancov to user-supplied `sancov_fname`
582 |     # Extract linecov info into self.curr* report
583 |     def rename_and_extract_linecov(self, sancov_fname):
584 |         out_lines = []
585 |
586 |         # Raw sancov file in fpath
587 |         fpath, fname = os.path.split(sancov_fname)
588 |         # Find and rename sancov file
589 |         if not self.find_sancov_file_and_rename(fpath, sancov_fname):
590 |             return False
591 |
592 |         # Positive line coverage
593 |         # sancov -obj torture_test -print torture_test.28801.sancov 2>/dev/null | llvm-symbolizer -obj torture_test > out
594 |         out_lines = self.run_cmd(self.args.sancov_path \
595 |                                  + " -obj " + self.bin_path \
596 |                                  + " -print " + sancov_fname \
597 |                                  + " 2>/dev/null" \
598 |                                  + " | " + self.args.llvm_sym_path \
599 |                                  + " -obj " + self.bin_path,
600 |                                  self.Want_Output)
601 |
602 |         # Pos line coverage
603 |         # self.write_file("\n".join(out_lines), cp['pos_line_cov'])
604 |         # In-memory representation
605 |         self.curr_pos_report = self.linecov_report("\n".join(out_lines))
606 |         if not self.curr_pos_report:
607 |             return False
608 |
609 |         # Zero line coverage
610 |         # pysancov print cp['sancov_raw'] > covered.txt
611 |         # pysancov missing bin_path < covered.txt 2>/dev/null | llvm-symbolizer -obj bin_path > cp['zero_line_cov']
612 |         covered = os.path.join(fpath, "covered.txt")
613 |         out_lines = self.run_cmd(self.args.pysancov_path \
614 |                                  + " print " + sancov_fname + " > " + covered + ";" \
615 |                                  + " " + self.args.pysancov_path + " missing " + self.bin_path \
616 |                                  + " < " + covered + " 2>/dev/null | " \
617 |                                  + self.args.llvm_sym_path + " -obj " + self.bin_path,
618 |                                  self.Want_Output)
619 |         self.curr_zero_report = self.linecov_report("\n".join(out_lines))
620 |
621 |         # Pos func coverage
622 |         # sancov -demangle -obj bin_path -covered-functions cp['sancov_raw'] 2>/dev/null
623 |         # out_lines = self.run_cmd(self.args.sancov_path \
624 |         #                          + " -demangle" + " -obj " + self.bin_path \
625 |         #                          + " -covered-functions " + cp['sancov_raw'] + " 2>/dev/null",
626 |         #                          self.Want_Output)
627 |         # # self.write_file("\n".join(out_lines), cp['pos_func_cov'])
628 |         # self.curr_reports.append(FuncCov_Report("\n".join(out_lines)))
629 |
630 |         # Zero func coverage
631 |         # sancov -demangle -obj bin_path -not-covered-functions cp['sancov_raw'] 2>/dev/null
632 |         # out_lines = self.run_cmd(self.args.sancov_path \
633 |         #                          + " -demangle" + " -obj " + self.bin_path \
634 |         #                          + " -not-covered-functions " + cp['sancov_raw'] + " 2>/dev/null",
635 |         #                          self.Want_Output)
636 |         # # self.write_file("\n".join(out_lines), cp['zero_func_cov'])
637 |         # self.curr_reports.append(FuncCov_Report("\n".join(out_lines)))
638 |         return True
639 |
640 |     def linecov_report(self, repstr):
641 |         return set((fp, func, ln, col) for (func, fp, ln, col) \
642 |                    in re.findall(self.line_cov_regex, repstr))
643 |         # Don't do this if you want to keep sets
644 |         # return sorted(s, key=lambda cov_entry: cov_entry[0])
645 |
646 |     def find_sancov_file_and_rename(self, searchdir, newname):
647 |
648 |         for filename in os.listdir(searchdir):
649 |             match = self.sancov_filename_regex.match(filename)
650 |             if match and match.group(0):
651 |                 src = os.path.join(searchdir, match.group(0))
652 |                 if os.path.isfile(src):
653 |                     os.rename(src, newname)
654 |                     return True
655 |
assert False, "sancov file is a directory!" 656 | 657 | # assert False, "sancov file {} not found!".format(newname) 658 | self.logr("Could not generate coverage info for parent {}. Bailing out!".format(newname)) 659 | return False 660 | 661 | # Credit: http://stackoverflow.com/a/1104641/4712439 662 | def does_dry_run_throw_error(self, cmd): 663 | # Dry-runs cmd once; returns True iff the run died of a signal 664 | env = os.environ.copy() 665 | if self.sanitizer == 'asan': 666 | spectrum_asan_options(env) 667 | 668 | try: 669 | subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True, env=env) 670 | except subprocess.CalledProcessError, e: 671 | # Signal deaths surface as 128+N, or as a negative code on some shells; treat both as a crash 672 | return (e.returncode > 128 or e.returncode < 0) 673 | 674 | return False 675 | 676 | def run_cmd(self, cmd, collect, env=None): 677 | 678 | out = [] 679 | 680 | if self.args.verbose: 681 | self.logr(" CMD: %s" % cmd) 682 | 683 | fh = None 684 | if self.args.disable_cmd_redirection or collect == self.Want_Output: 685 | fh = open(self.cov_paths['tmp_out'], 'w') 686 | else: 687 | fh = open(os.devnull, 'w') 688 | 689 | if env is None: 690 | subprocess.call(cmd, stdin=None, 691 | stdout=fh, stderr=subprocess.STDOUT, shell=True, executable='/bin/bash') 692 | else: 693 | subprocess.call(cmd, stdin=None, 694 | stdout=fh, stderr=subprocess.STDOUT, shell=True, env=env, executable='/bin/bash') 695 | 696 | fh.close() 697 | 698 | if self.args.disable_cmd_redirection or collect == self.Want_Output: 699 | with open(self.cov_paths['tmp_out'], 'r') as f: 700 | for line in f: 701 | out.append(line.rstrip('\n')) 702 | 703 | return out 704 | 705 | def validate_args(self): 706 | if self.coverage_cmd: 707 | if 'AFL_FILE' not in self.coverage_cmd: 708 | print "[*] --coverage-cmd must contain AFL_FILE" 709 | return False 710 | else: 711 | print "[*] --coverage-cmd missing" 712 | return False 713 | 714 | if not self.afl_fuzzing_dir: 715 | print "[*] --afl-fuzzing-dir missing" 716 | return False 717 | 718 | if not self.crash_dir or not os.path.isdir(self.crash_dir): 719 | print "[*] --crash-dir missing or not a dir" 720 | return False 721 | 722 | if not self.bin_path: 723 | print "[*] Please provide path to coverage " \ 724 | "instrumented binary using the --bin-path argument" 725 | return False 726 | 727 | if not self.which(self.bin_path): 728 | print "[*] Could not find an executable binary in " \ 729 | "--bin-path '%s'" % self.bin_path 730 | return False 731 | 732 | if not self.which(self.args.sancov_path): 733 | print "[*] sancov command not found: %s" % (self.args.sancov_path) 734 | return False 735 | 736 | if not self.which(self.args.pysancov_path): 737 | print "[*] sancov.py script not found: %s" % (self.args.pysancov_path) 738 | return False 739 | 740 | if not self.which(self.args.llvm_sym_path): 741 | print "[*] llvm-symbolizer command not found: %s" % (self.args.llvm_sym_path) 742 | return False 743 | 744 | # if self.args.dd_mode and not self.args.dd_raw_queue_path: 745 | # print "[*] --dd-mode requires --dd-raw-queue-path to be set" 746 | # return False 747 | 748 | # if self.args.dd_mode and not self.args.dd_crash_file: 749 | # print "[*] Pass crashing input to --dd-crash-file" 750 | # return False 751 | 752 | return True 753 | 754 | ### credit: http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python 755 | @staticmethod 756 | def is_exe(fpath): 757 | return os.path.isfile(fpath) and os.access(fpath, os.X_OK) 758 | 759 | @classmethod 760 | def which(cls, prog): 761 | fpath, fname = os.path.split(prog) 762 | if fpath:
763 | if cls.is_exe(prog): 764 | return prog 765 | else: 766 | for path in os.environ["PATH"].split(os.pathsep): 767 | path = path.strip('"') 768 | exe_file = os.path.join(path, prog) 769 | if cls.is_exe(exe_file): 770 | return exe_file 771 | 772 | return None 773 | 774 | def add_fuzz_dir(self, fdir): 775 | self.cov_paths['dirs'][fdir] = {} 776 | self.cov_paths['dirs'][fdir]['prev_file'] = '' 777 | return 778 | 779 | def init_mkdirs(self): 780 | 781 | # Creates top_dir plus the slice/dice subdirs used by delta-debug mode 782 | create_cov_dirs = 0 783 | if self.is_dir(self.cov_paths['top_dir']): 784 | if self.args.overwrite: 785 | rmtree(self.cov_paths['top_dir']) 786 | create_cov_dirs = 1 787 | else: 788 | create_cov_dirs = 1 789 | 790 | if create_cov_dirs: 791 | for k in ['top_dir']: 792 | os.makedirs(self.cov_paths[k]) 793 | for k in ['slice_dir', 'dice_dir', 'dd_stash_dir', 'dd_filter_dir']: 794 | os.mkdir(self.cov_paths[k]) 795 | return 796 | 797 | @staticmethod 798 | def is_dir(dpath): 799 | return os.path.exists(dpath) and os.path.isdir(dpath) 800 | 801 | def logr(self, pstr): 802 | if not self.args.quiet: 803 | print " " + pstr 804 | self.append_file(pstr, self.cov_paths['log_file']) 805 | return 806 | 807 | @staticmethod 808 | def append_file(pstr, path): 809 | f = open(path, 'a') 810 | f.write("%s\n" % pstr) 811 | f.close() 812 | return 813 | 814 | @classmethod 815 | def write_status(cls, status_file): 816 | f = open(status_file, 'w') 817 | f.write("afl_sancov_pid : %d\n" % os.getpid()) 818 | f.write("afl_sancov_version : %s\n" % cls.Version) 819 | f.write("command_line : %s\n" % ' '.join(argv)) 820 | f.close() 821 | return -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/test-pipeline/orthrus/7e916f36ceffcc4fdd9013a4d952649f69738aa8/tests/__init__.py -------------------------------------------------------------------------------- /tests/test_early_exit.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusEarlyExit(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 11 | 12 | # Create 13 | def test_create_early_exit(self): 14 | args = parse_cmdline(self.description, ['create', '-asan']) 15 | cmd = OrthrusCreate(args, self.config) 16 | cmd.run() 17 | cmd = OrthrusCreate(args, self.config, True) 18 | self.assertTrue(cmd.run()) 19 | shutil.rmtree(self.orthrusdirname) 20 | 21 | # Add 22 | def test_add_early_exit(self): 23 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 24 | util.mkdir_p(self.orthrusdirname + '/conf') 25 | with open(self.routineconf_file, 'w') as routineconf_fp: 26 | json.dump(routineconf_dict, routineconf_fp, indent=4) 27 | 28 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'.
29 | format(self.routineconf_file), '-s=./seeds']) 30 | cmd = OrthrusAdd(args, self.config) 31 | self.assertFalse(cmd.run()) 32 | shutil.rmtree(self.orthrusdirname) 33 | 34 | # Remove 35 | def test_remove_early_exit(self): 36 | # Job ID does not matter since exit precedes job ID validation 37 | args = parse_cmdline(self.description, ['remove', '-j', '123']) 38 | cmd = OrthrusRemove(args, self.config) 39 | self.assertFalse(cmd.run()) 40 | 41 | # Start 42 | def test_start_early_exit(self): 43 | args = parse_cmdline(self.description, ['start', '-j', '123']) 44 | cmd = OrthrusStart(args, self.config) 45 | self.assertFalse(cmd.run()) 46 | 47 | # Stop 48 | def test_stop_early_exit(self): 49 | args = parse_cmdline(self.description, ['stop', '-j', '123']) 50 | cmd = OrthrusStop(args, self.config, True) 51 | self.assertFalse(cmd.run()) 52 | 53 | # Triage 54 | def test_triage_early_exit(self): 55 | args = parse_cmdline(self.description, ['triage', '-j', '123']) 56 | cmd = OrthrusTriage(args, self.config) 57 | self.assertFalse(cmd.run()) 58 | 59 | def test_triage_asan_exit(self): 60 | if not os.path.isdir(self.orthrusdirname): 61 | os.mkdir(self.orthrusdirname) 62 | args = parse_cmdline(self.description, ['triage', '-j', '123']) 63 | cmd = OrthrusTriage(args, self.config) 64 | self.assertFalse(cmd.run()) 65 | shutil.rmtree(self.orthrusdirname) 66 | 67 | # Coverage 68 | def test_coverage_early_exit(self): 69 | args = parse_cmdline(self.description, ['coverage', '-j', '123']) 70 | cmd = OrthrusCoverage(args, self.config) 71 | self.assertFalse(cmd.run()) 72 | 73 | # Show 74 | def test_show_early_exit(self): 75 | args = parse_cmdline(self.description, ['show', '-j', '123']) 76 | cmd = OrthrusShow(args, self.config) 77 | self.assertFalse(cmd.run()) 78 | 79 | # Destroy 80 | def test_destroy_early_exit(self): 81 | args = parse_cmdline(self.description, ['destroy']) 82 | cmd = OrthrusDestroy(args, self.config, 'y') 83 | self.assertFalse(cmd.run()) -------------------------------------------------------------------------------- /tests/test_gdb_orthrus.py: -------------------------------------------------------------------------------- 1 | import shutil 2 | import unittest 3 | from orthrus.commands import * 4 | from orthrusutils.orthrusutils import * 5 | 6 | class TestGdbOrthrus(unittest.TestCase): 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 11 | 12 | def test_gdb_orthrus(self): 13 | cmd = ['gdb', '-q', '-ex=r', '-ex=call $jsonify("tmp.json")', '-ex=quit', '--args', '{}/binaries/harden-dbg/bin/main_no_abort' 14 | .format(self.orthrusdirname), glob.glob('{}/unique/harden/HARDEN*'.format(self.add_cmd.job.rootdir))[0]] 15 | ret = subprocess.Popen(cmd, stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT).wait() # devnull must be opened for writing when used as stdout 16 | self.assertTrue(ret == 0 and os.path.exists("tmp.json")) 17 | 18 | @classmethod 19 | def setUpClass(cls): 20 | # Create 21 | args = parse_cmdline(cls.description, ['create', '-fuzz']) 22 | cmd = OrthrusCreate(args, cls.config) 23 | cmd.run() 24 | # Add job 25 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 26 | with open(cls.routineconf_file, 'w') as routineconf_fp: 27 | json.dump(routineconf_dict, routineconf_fp, indent=4) 28 | 29 | args = parse_cmdline(cls.description, ['add', '--job=main_no_abort @@', '--jobtype=routine', '--jobconf={}'.
30 | format(cls.routineconf_file), '-i=./afl-crash-out-rename.tar.gz']) 31 | cls.add_cmd = OrthrusAdd(args, cls.config) 32 | cls.add_cmd.run() 33 | 34 | # Triage 35 | args = parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd.job.id]) 36 | cmd = OrthrusTriage(args, cls.config, test=True) 37 | cmd.run() 38 | 39 | @classmethod 40 | def tearDownClass(cls): 41 | shutil.rmtree(cls.orthrusdirname) 42 | if os.path.exists("tmp.json"): 43 | os.remove("tmp.json") 44 | -------------------------------------------------------------------------------- /tests/test_orthrus_add.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusAdd(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | archive_dir = orthrusdirname + '/archive' 11 | abconf_file = orthrusdirname + '/conf/abconf.conf' 12 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 13 | 14 | def test_add_job(self): 15 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 16 | format(self.routineconf_file)]) 17 | self.cmd = OrthrusAdd(args, self.config) 18 | self.assertTrue(self.cmd.run()) 19 | 20 | def test_add_and_seed(self): 21 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 22 | format(self.routineconf_file), '-s=./seeds']) 23 | self.cmd = OrthrusAdd(args, self.config) 24 | self.assertTrue(self.cmd.run()) 25 | 26 | def test_add_and_import(self): 27 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 28 | format(self.routineconf_file), '-i=./afl-out.tar.gz']) 29 | self.cmd = OrthrusAdd(args, self.config) 30 | self.assertTrue(self.cmd.run()) 31 | 32 | def test_add_and_import_crashes(self): 33 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 34 | format(self.routineconf_file), '-i=./afl-crash-out.tar.gz']) 35 | self.cmd = OrthrusAdd(args, self.config) 36 | self.assertTrue(self.cmd.run()) 37 | 38 | def test_add_and_import_archive(self): 39 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 40 | format(self.routineconf_file), '-i=./afl-arch-out.tar.gz']) 41 | self.cmd = OrthrusAdd(args, self.config) 42 | self.assertTrue(self.cmd.run()) 43 | 44 | def test_add_abtest_job(self): 45 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=abtests', '--jobconf={}'. 46 | format(self.abconf_file)]) 47 | self.cmd = OrthrusAdd(args, self.config) 48 | self.assertTrue(self.cmd.run()) 49 | 50 | def test_add_abtest_and_seed(self): 51 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=abtests', '--jobconf={}'. 52 | format(self.abconf_file), '-s=./seeds']) 53 | self.cmd = OrthrusAdd(args, self.config) 54 | self.assertTrue(self.cmd.run()) 55 | 56 | def test_add_abtest_and_import(self): 57 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=abtests', '--jobconf={}'. 58 | format(self.abconf_file), '-i=./afl-out.tar.gz']) 59 | self.cmd = OrthrusAdd(args, self.config) 60 | self.assertTrue(self.cmd.run()) 61 | 62 | def test_add_abtest_and_import_crashes(self): 63 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=abtests', '--jobconf={}'. 
64 | format(self.abconf_file), '-i=./afl-crash-out.tar.gz']) 65 | self.cmd = OrthrusAdd(args, self.config) 66 | self.assertTrue(self.cmd.run()) 67 | 68 | def test_add_abtest_and_import_archive(self): 69 | args = parse_cmdline(self.description, ['add', '--job=main @@', '--jobtype=abtests', '--jobconf={}'. 70 | format(self.abconf_file), '-i=./afl-arch-out.tar.gz']) 71 | self.cmd = OrthrusAdd(args, self.config) 72 | self.assertTrue(self.cmd.run()) 73 | 74 | @classmethod 75 | def setUpClass(cls): 76 | args = parse_cmdline(cls.description, ['create', '-asan']) 77 | cmd = OrthrusCreate(args, cls.config) 78 | cmd.run() 79 | abconf_dict = {'num_jobs': 2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 80 | 'fuzzerB_args': ''} 81 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 82 | with open(cls.abconf_file, 'w') as abconf_fp: 83 | json.dump(abconf_dict, abconf_fp, indent=4) 84 | with open(cls.routineconf_file, 'w') as routineconf_fp: 85 | json.dump(routineconf_dict, routineconf_fp, indent=4) 86 | 87 | @classmethod 88 | def tearDownClass(cls): 89 | shutil.rmtree(cls.orthrusdirname) 90 | 91 | def tearDown(self): 92 | OrthrusRemove(parse_cmdline(self.description, ['remove', '-j', self.cmd.job.id]), self.config).run() 93 | shutil.rmtree(self.archive_dir) 94 | os.makedirs(self.archive_dir) -------------------------------------------------------------------------------- /tests/test_orthrus_coverage.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusCoverage(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | abconf_file = orthrusdirname + '/conf/abconf.conf' 11 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 12 | 13 | def test_coverage(self): 14 | args = parse_cmdline(self.description, ['coverage', '-j', self.add_cmd.job.id]) 15 | cmd = OrthrusCoverage(args, self.config) 16 | self.assertTrue(cmd.run()) 17 | time.sleep(3*TEST_SLEEP) 18 | self.assertTrue(os.path.isfile(self.add_cmd.job.rootdir + '/afl-out/cov/web/index.html')) 19 | 20 | def test_coverage_abtest(self): 21 | args = parse_cmdline(self.description, ['coverage', '-j', self.add_cmd_abtest.job.id]) 22 | cmd = OrthrusCoverage(args, self.config) 23 | self.assertTrue(cmd.run()) 24 | 25 | @classmethod 26 | def setUpClass(cls): 27 | # Create 28 | args = parse_cmdline(cls.description, ['create', '-fuzz', '-cov']) 29 | cmd = OrthrusCreate(args, cls.config) 30 | cmd.run() 31 | # Add routine 32 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 33 | with open(cls.routineconf_file, 'w') as routineconf_fp: 34 | json.dump(routineconf_dict, routineconf_fp, indent=4) 35 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 36 | format(cls.routineconf_file), '-s=./seeds']) 37 | cls.add_cmd = OrthrusAdd(args, cls.config) 38 | cls.add_cmd.run() 39 | # Add a/b test 40 | abconf_dict = {'num_jobs':2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 41 | 'fuzzerB_args': ''} 42 | with open(cls.abconf_file, 'w') as abconf_fp: 43 | json.dump(abconf_dict, abconf_fp, indent=4) 44 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds', '--jobtype=abtests', '--jobconf={}'. 
45 | format(cls.abconf_file)]) 46 | cls.add_cmd_abtest = OrthrusAdd(args, cls.config) 47 | cls.add_cmd_abtest.run() 48 | # Start routine 49 | args = parse_cmdline(cls.description, ['start', '-j', cls.add_cmd.job.id]) 50 | start_cmd = OrthrusStart(args, cls.config) 51 | start_cmd.run() 52 | time.sleep(TEST_SLEEP) 53 | # Stop routine 54 | args = parse_cmdline(cls.description, ['stop', '-j', cls.add_cmd.job.id]) 55 | cmd = OrthrusStop(args, cls.config, True) 56 | cmd.run() 57 | 58 | @classmethod 59 | def tearDownClass(cls): 60 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_orthrus_create.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusCreate(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | blacklist_file = 'asan_blacklist.txt' 11 | 12 | def test_create_asan(self): 13 | args = parse_cmdline(self.description, ['create', '-asan']) 14 | cmd = OrthrusCreate(args, self.config) 15 | self.assertTrue(cmd.run()) 16 | 17 | def test_create_fuzz(self): 18 | args = parse_cmdline(self.description, ['create', '-fuzz']) 19 | cmd = OrthrusCreate(args, self.config) 20 | self.assertTrue(cmd.run()) 21 | 22 | def test_create_cov(self): 23 | args = parse_cmdline(self.description, ['create', '-cov']) 24 | cmd = OrthrusCreate(args, self.config) 25 | self.assertTrue(cmd.run()) 26 | 27 | def test_create_asan_cov(self): 28 | args = parse_cmdline(self.description, ['create', '-asan', '-sancov']) 29 | cmd = OrthrusCreate(args, self.config) 30 | self.assertTrue(cmd.run()) 31 | 32 | def test_create_harden_cov(self): 33 | args = parse_cmdline(self.description, ['create', '-fuzz', '-sancov']) 34 | cmd = OrthrusCreate(args, self.config) 35 | self.assertTrue(cmd.run()) 36 | 37 | def test_create_asan_blacklist_fail(self): 38 | if os.path.exists(self.blacklist_file): 39 | os.remove(self.blacklist_file) 40 | args = parse_cmdline(self.description, ['create', '-asanblacklist']) 41 | cmd = OrthrusCreate(args, self.config) 42 | self.assertFalse(cmd.run()) 43 | self.touch_blacklist() 44 | 45 | def test_create_asan_blacklist(self): 46 | args = parse_cmdline(self.description, ['create', '-asanblacklist']) 47 | cmd = OrthrusCreate(args, self.config) 48 | self.assertTrue(cmd.run()) 49 | 50 | def test_create_dict(self): 51 | args = parse_cmdline(self.description, ['create', '-dict']) 52 | cmd = OrthrusCreate(args, self.config) 53 | # TODO: Fix Test infra for dict 54 | self.assertTrue(cmd.run()) 55 | 56 | @classmethod 57 | def touch_blacklist(cls): 58 | with open(cls.blacklist_file, 'w') as file: 59 | file.write('#') 60 | 61 | @classmethod 62 | def setUpClass(cls): 63 | cls.touch_blacklist() 64 | 65 | def tearDown(self): 66 | shutil.rmtree(self.orthrusdirname) 67 | -------------------------------------------------------------------------------- /tests/test_orthrus_destroy.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusDestroy(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | 10 | def test_destroy(self): 11 | args = parse_cmdline(self.description, ['destroy']) 12 | cmd = OrthrusDestroy(args, self.config, 
'y') 13 | self.assertTrue(cmd.run()) 14 | 15 | def setUp(self): 16 | self.config = {'orthrus' : {'directory': self.orthrusdirname}} 17 | args = parse_cmdline(self.description, ['create', '-asan']) 18 | cmd = OrthrusCreate(args, self.config) 19 | cmd.run() -------------------------------------------------------------------------------- /tests/test_orthrus_remove.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusRemove(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | abconf_file = orthrusdirname + '/conf/abconf.conf' 11 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 12 | 13 | def test_remove_job(self): 14 | # Remove job 15 | args = parse_cmdline(self.description, ['remove', '-j=' + self.add_cmd.job.id]) 16 | cmd = OrthrusRemove(args, self.config) 17 | self.assertTrue(cmd.run()) 18 | 19 | def test_remove_job_abtest(self): 20 | # Remove abtest job 21 | args = parse_cmdline(self.description, ['remove', '-j=' + self.add_cmd_abtests.job.id]) 22 | cmd = OrthrusRemove(args, self.config) 23 | self.assertTrue(cmd.run()) 24 | 25 | @classmethod 26 | def setUpClass(cls): 27 | # Create 28 | args = parse_cmdline(cls.description, ['create', '-asan']) 29 | cmd = OrthrusCreate(args, cls.config) 30 | cmd.run() 31 | 32 | # abtests set up 33 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 34 | with open(cls.routineconf_file, 'w') as routineconf_fp: 35 | json.dump(routineconf_dict, routineconf_fp, indent=4) 36 | 37 | abconf_dict = {'num_jobs':2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 38 | 'fuzzerB_args': ''} 39 | with open(cls.abconf_file, 'w') as abconf_fp: 40 | json.dump(abconf_dict, abconf_fp, indent=4) 41 | 42 | # Add job 43 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '--jobtype=routine', '--jobconf={}'. 44 | format(cls.routineconf_file)]) 45 | cls.add_cmd = OrthrusAdd(args, cls.config) 46 | cls.add_cmd.run() 47 | 48 | # Add abtest job 49 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '--jobtype=abtests', '--jobconf={}'. 
50 | format(cls.abconf_file)]) 51 | cls.add_cmd_abtests = OrthrusAdd(args, cls.config) 52 | cls.add_cmd_abtests.run() 53 | 54 | @classmethod 55 | def tearDownClass(cls): 56 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_orthrus_runtime.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusRuntime(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | abconf_file = orthrusdirname + '/conf/abconf.conf' 11 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 12 | 13 | def test_runtime_routine_asan(self): 14 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd1.job.id]) 15 | cmd = OrthrusRuntime(args, self.config) 16 | self.assertTrue(cmd.run()) 17 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(self.add_cmd1.job.rootdir))) 18 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(self.add_cmd1.job.rootdir))) 19 | ## Fail cos regen 20 | self.assertFalse(cmd.run()) 21 | ## Regen and check 22 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd1.job.id, '--regenerate']) 23 | cmd = OrthrusRuntime(args, self.config) 24 | self.assertTrue(cmd.run()) 25 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(self.add_cmd1.job.rootdir))) 26 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(self.add_cmd1.job.rootdir))) 27 | 28 | def test_runtime_routine_harden_asan(self): 29 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd2.job.id]) 30 | cmd = OrthrusRuntime(args, self.config) 31 | self.assertTrue(cmd.run()) 32 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(self.add_cmd2.job.rootdir))) 33 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(self.add_cmd2.job.rootdir))) 34 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/harden'.format(self.add_cmd2.job.rootdir))) 35 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/harden/*.json'.format(self.add_cmd2.job.rootdir))) 36 | ## Fail cos regen 37 | self.assertFalse(cmd.run()) 38 | ## Regen and check 39 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd2.job.id, '--regenerate']) 40 | cmd = OrthrusRuntime(args, self.config) 41 | self.assertTrue(cmd.run()) 42 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(self.add_cmd2.job.rootdir))) 43 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(self.add_cmd2.job.rootdir))) 44 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/harden'.format(self.add_cmd2.job.rootdir))) 45 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/harden/*.json'.format(self.add_cmd2.job.rootdir))) 46 | 47 | def test_runtime_abtests(self): 48 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd_abtest.job.id]) 49 | cmd = OrthrusRuntime(args, self.config) 50 | self.assertTrue(cmd.run()) 51 | 52 | # Check if files were generated 53 | joba_root = '{}/{}'.format(self.add_cmd_abtest.job.rootdir, self.add_cmd_abtest.job.jobids[0]) 54 | jobb_root = '{}/{}'.format(self.add_cmd_abtest.job.rootdir, self.add_cmd_abtest.job.jobids[1]) 55 | 56 | 
self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(joba_root))) 57 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(joba_root))) 58 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(jobb_root))) 59 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(jobb_root))) 60 | ## Fail cos regen 61 | self.assertFalse(cmd.run()) 62 | ## Regen and check 63 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd_abtest.job.id, '--regenerate']) 64 | cmd = OrthrusRuntime(args, self.config) 65 | self.assertTrue(cmd.run()) 66 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(joba_root))) 67 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(joba_root))) 68 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(jobb_root))) 69 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(jobb_root))) 70 | 71 | def test_runtime_abtests_harden_asan(self): 72 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd_abtest2.job.id]) 73 | cmd = OrthrusRuntime(args, self.config) 74 | self.assertTrue(cmd.run()) 75 | 76 | # Check if files were generated 77 | joba_root = '{}/{}'.format(self.add_cmd_abtest2.job.rootdir, self.add_cmd_abtest2.job.jobids[0]) 78 | jobb_root = '{}/{}'.format(self.add_cmd_abtest2.job.rootdir, self.add_cmd_abtest2.job.jobids[1]) 79 | 80 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(joba_root))) 81 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(joba_root))) 82 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/harden'.format(joba_root))) 83 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/harden/*.json'.format(joba_root))) 84 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(jobb_root))) 85 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(jobb_root))) 86 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/harden'.format(jobb_root))) 87 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/harden/*.json'.format(jobb_root))) 88 | ## Fail cos regen 89 | self.assertFalse(cmd.run()) 90 | ## Regen and check 91 | args = parse_cmdline(self.description, ['runtime', '-j', self.add_cmd_abtest2.job.id, '--regenerate']) 92 | cmd = OrthrusRuntime(args, self.config) 93 | self.assertTrue(cmd.run()) 94 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(joba_root))) 95 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(joba_root))) 96 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/harden'.format(joba_root))) 97 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/harden/*.json'.format(joba_root))) 98 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/asan'.format(jobb_root))) 99 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/asan/*.json'.format(jobb_root))) 100 | self.assertTrue(os.path.exists('{}/crash-analysis/runtime/harden'.format(jobb_root))) 101 | self.assertTrue(glob.glob('{}/crash-analysis/runtime/harden/*.json'.format(jobb_root))) 102 | 103 | @classmethod 104 | def setUpClass(cls): 105 | # Create 106 | args = parse_cmdline(cls.description, ['create', '-asan', '-fuzz']) 107 | cmd = OrthrusCreate(args, cls.config) 108 | cmd.run() 109 | # Add routine job 1 (asan only) 110 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 111 | with open(cls.routineconf_file, 'w') as routineconf_fp: 112 | 
json.dump(routineconf_dict, routineconf_fp, indent=4) 113 | 114 | args = parse_cmdline(cls.description, ['add', '--job=test_asan @@', '--jobtype=routine', '--jobconf={}'. 115 | format(cls.routineconf_file), '-i=./afl-crash-out-rename.tar.gz']) 116 | cls.add_cmd1 = OrthrusAdd(args, cls.config) 117 | cls.add_cmd1.run() 118 | 119 | args = parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd1.job.id]) 120 | cmd = OrthrusTriage(args, cls.config, test=True) 121 | cmd.run() 122 | 123 | # Add routine job 2 (harden+asan) 124 | args = parse_cmdline(cls.description, ['add', '--job=main_no_abort @@', '--jobtype=routine', '--jobconf={}'. 125 | format(cls.routineconf_file), '-i=./afl-crash-out-rename.tar.gz']) 126 | cls.add_cmd2 = OrthrusAdd(args, cls.config) 127 | cls.add_cmd2.run() 128 | 129 | args = parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd2.job.id]) 130 | cmd = OrthrusTriage(args, cls.config, test=True) 131 | cmd.run() 132 | 133 | # Add a/b test job (asan only) 134 | abconf_dict = {'num_jobs':2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 135 | 'fuzzerB_args': ''} 136 | with open(cls.abconf_file, 'w') as abconf_fp: 137 | json.dump(abconf_dict, abconf_fp, indent=4) 138 | args = parse_cmdline(cls.description, ['add', '--job=test_asan @@', '-i=./afl-crash-out.tar.gz', '--jobconf={}'. 139 | format(cls.abconf_file), '--jobtype=abtests']) 140 | cls.add_cmd_abtest = OrthrusAdd(args, cls.config) 141 | cls.add_cmd_abtest.run() 142 | 143 | args = parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd_abtest.job.id]) 144 | cmd = OrthrusTriage(args, cls.config, test=True) 145 | cmd.run() 146 | 147 | # Add a/b test job (asan + harden) 148 | args = parse_cmdline(cls.description, ['add', '--job=main_no_abort @@', '-i=./afl-crash-out.tar.gz', '--jobconf={}'. 
149 | format(cls.abconf_file), '--jobtype=abtests']) 150 | cls.add_cmd_abtest2 = OrthrusAdd(args, cls.config) 151 | cls.add_cmd_abtest2.run() 152 | 153 | args = parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd_abtest2.job.id]) 154 | cmd = OrthrusTriage(args, cls.config, test=True) 155 | cmd.run() 156 | 157 | @classmethod 158 | def tearDownClass(cls): 159 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_orthrus_show.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusShow(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | abconf_file = orthrusdirname + '/conf/abconf.conf' 11 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 12 | 13 | def test_show_jobs(self): 14 | args = parse_cmdline(self.description, ['show', '-conf']) 15 | cmd = OrthrusShow(args, self.config) 16 | self.assertTrue(cmd.run()) 17 | 18 | def test_show_status(self): 19 | # Start/show/stop job 20 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd.job.id]) 21 | start_cmd = OrthrusStart(args, self.config) 22 | start_cmd.run() 23 | time.sleep(2*TEST_SLEEP) 24 | args = parse_cmdline(self.description, ['show', '-j', self.add_cmd.job.id]) 25 | cmd = OrthrusShow(args, self.config) 26 | self.assertTrue(cmd.run()) 27 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd.job.id]) 28 | stop_cmd = OrthrusStop(args, self.config, True) 29 | stop_cmd.run() 30 | 31 | def test_show_status_abtest(self): 32 | # Start/show/stop job 33 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd_abtest.job.id]) 34 | start_cmd = OrthrusStart(args, self.config) 35 | start_cmd.run() 36 | time.sleep(2 * TEST_SLEEP) 37 | args = parse_cmdline(self.description, ['show', '-j', self.add_cmd_abtest.job.id]) 38 | cmd = OrthrusShow(args, self.config) 39 | self.assertTrue(cmd.run()) 40 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd_abtest.job.id]) 41 | stop_cmd = OrthrusStop(args, self.config, True) 42 | stop_cmd.run() 43 | 44 | def test_show_cov(self): 45 | # Start/sleep/stop job 46 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd.job.id, '-c']) 47 | start_cmd = OrthrusStart(args, self.config, True) 48 | start_cmd.run() 49 | # Long sleep so that afl-cov catches up 50 | time.sleep(2*TEST_SLEEP) 51 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd.job.id]) 52 | stop_cmd = OrthrusStop(args, self.config, True) 53 | stop_cmd.run() 54 | # Sleep again so afl-cov finishes 55 | time.sleep(2*TEST_SLEEP) 56 | args = parse_cmdline(self.description, ['show', '-j', self.add_cmd.job.id, '-cov']) 57 | cmd = OrthrusShow(args, self.config, True) 58 | self.assertTrue(cmd.run()) 59 | 60 | def test_show_cov_abtest(self): 61 | # Start/sleep/stop job 62 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd_abtest.job.id, '-c']) 63 | start_cmd = OrthrusStart(args, self.config, True) 64 | start_cmd.run() 65 | # Long sleep so that afl-cov catches up 66 | time.sleep(2*TEST_SLEEP) 67 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd_abtest.job.id, '-c']) 68 | stop_cmd = OrthrusStop(args, self.config, True) 69 | stop_cmd.run() 70 | # Sleep again so afl-cov finishes 71 | time.sleep(TEST_SLEEP) 72 | 
args = parse_cmdline(self.description, ['show', '-j', self.add_cmd_abtest.job.id, '-cov']) 73 | cmd = OrthrusShow(args, self.config, True) 74 | self.assertTrue(cmd.run()) 75 | 76 | @classmethod 77 | def setUpClass(cls): 78 | # Create 79 | args = parse_cmdline(cls.description, ['create', '-fuzz', '-cov']) 80 | cmd = OrthrusCreate(args, cls.config) 81 | cmd.run() 82 | # Add job 83 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 84 | with open(cls.routineconf_file, 'w') as routineconf_fp: 85 | json.dump(routineconf_dict, routineconf_fp, indent=4) 86 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds', '--jobtype=routine', 87 | '--jobconf={}'.format(cls.routineconf_file)]) 88 | cls.add_cmd = OrthrusAdd(args, cls.config) 89 | cls.add_cmd.run() 90 | # Add a/b test job 91 | abconf_dict = {'num_jobs': 2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 92 | 'fuzzerB_args': ''} 93 | with open(cls.abconf_file, 'w') as abconf_fp: 94 | json.dump(abconf_dict, abconf_fp, indent=4) 95 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds', '--jobconf={}'. 96 | format(cls.abconf_file), '--jobtype=abtests']) 97 | cls.add_cmd_abtest = OrthrusAdd(args, cls.config) 98 | cls.add_cmd_abtest.run() 99 | 100 | @classmethod 101 | def tearDownClass(cls): 102 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_orthrus_spectrum.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusSpectrum(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | abconf_file = orthrusdirname + '/conf/abconf.conf' 11 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 12 | 13 | def compare_dice_json(self, file1, file2): 14 | 15 | expected_line_substring = 'src/main.cpp:main:37:5' 16 | 17 | with open(file1) as data_file1: 18 | data1 = json.load(data_file1) 19 | with open(file2) as data_file2: 20 | data2 = json.load(data_file2) 21 | 22 | self.assertEqual(data1["shrink-percent"], data2["shrink-percent"], 'Shrink percent did not match') 23 | self.assertEqual(data1["dice-linecount"], data2["dice-linecount"], 'Dice line count did not match') 24 | self.assertEqual(data1["slice-linecount"], data2["slice-linecount"], 'Slice line count did not match') 25 | self.assertEqual(data1["diff-node-spec"][0]["count"], data2["diff-node-spec"][0]["count"], 26 | 'Dice frequency did not match') 27 | self.assertTrue(expected_line_substring in data1["diff-node-spec"][0]["line"], 28 | 'Dice line did not match') 29 | self.assertEqual(data1["crashing-input"], data2["crashing-input"], 'Crashing input did not match') 30 | if 'parent-input' in data1 and 'parent-input' in data2: 31 | self.assertEqual(data1["parent-input"], data2["parent-input"], 'Parent input did not match') 32 | return True 33 | 34 | def compare_slice_json(self, file1, file2): 35 | with open(file1) as data_file1: 36 | data1 = json.load(data_file1) 37 | with open(file2) as data_file2: 38 | data2 = json.load(data_file2) 39 | self.assertEqual(data1["crashing-input"], data2["crashing-input"], 'Crashing input did not match') 40 | self.assertEqual(data1["slice-linecount"], data2["slice-linecount"], 'Slice line count did not match') 41 | return True 42 | 43 | def output_assert(self): 44 | # 
Output checks 45 | self.assertTrue(os.path.exists(self.dice_dir), "No dice dir generated") 46 | self.assertTrue(os.path.exists(self.slice_dir), "No slice dir generated") 47 | self.assertTrue(self.compare_slice_json(self.gen_slice, self.exp_slice)) 48 | return True 49 | 50 | def test_spectrum(self): 51 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd.job.id, '-q']) 52 | cmd = OrthrusSpectrum(args, self.config) 53 | self.assertTrue(cmd.run()) 54 | 55 | # Output checks 56 | self.assertTrue(self.output_assert()) 57 | self.assertTrue(self.compare_dice_json(self.gen_dice, self.exp_dice_single)) 58 | 59 | def test_spectrum_sancovbug(self): 60 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd.job.id, '-q', '--overwrite', 61 | '--sancov-bug']) 62 | cmd = OrthrusSpectrum(args, self.config) 63 | self.assertTrue(cmd.run()) 64 | 65 | # Output checks 66 | self.assertTrue(self.output_assert()) 67 | self.assertTrue(self.compare_dice_json(self.gen_dice, self.exp_dice_single)) 68 | 69 | def test_version(self): 70 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd.job.id, '--version']) 71 | cmd = OrthrusSpectrum(args, self.config) 72 | self.assertTrue(cmd.run()) 73 | 74 | def tests_overwrite_dir(self): 75 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd.job.id, '-q']) 76 | cmd = OrthrusSpectrum(args, self.config) 77 | self.assertFalse(cmd.run()) 78 | 79 | def test_spectrum_abtest(self): 80 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd_abtest.job.id]) 81 | cmd = OrthrusSpectrum(args, self.config) 82 | self.assertTrue(cmd.run()) 83 | 84 | def test_spectrum_multiple(self): 85 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd.job.id, '--dd-num=3', '--overwrite']) 86 | cmd = OrthrusSpectrum(args, self.config) 87 | self.assertTrue(cmd.run()) 88 | 89 | # Output checks 90 | self.assertTrue(self.output_assert()) 91 | self.assertTrue(self.compare_dice_json(self.gen_dice, self.exp_dice_multiple)) 92 | 93 | def test_spectrum_multiple_sancovbug(self): 94 | args = parse_cmdline(self.description, ['spectrum', '-j', self.add_cmd.job.id, '--dd-num=3', '--overwrite', 95 | '--sancov-bug']) 96 | cmd = OrthrusSpectrum(args, self.config) 97 | self.assertTrue(cmd.run()) 98 | 99 | # Output checks 100 | self.assertTrue(self.output_assert()) 101 | self.assertTrue(self.compare_dice_json(self.gen_dice, self.exp_dice_multiple)) 102 | 103 | @classmethod 104 | def setUpClass(cls): 105 | # Create 106 | args = parse_cmdline(cls.description, ['create', '-asan', '-fuzz', '-sancov']) 107 | cmd = OrthrusCreate(args, cls.config) 108 | cmd.run() 109 | # Add routine job 110 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 111 | with open(cls.routineconf_file, 'w') as routineconf_fp: 112 | json.dump(routineconf_dict, routineconf_fp, indent=4) 113 | 114 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-i=./afl-crash-out.tar.gz', '--jobtype=routine', 115 | '--jobconf={}'.format(cls.routineconf_file)]) 116 | cls.add_cmd = OrthrusAdd(args, cls.config) 117 | cls.add_cmd.run() 118 | # Start routine job fuzzing 119 | # args = parse_cmdline(cls.description, ['start', '-j', cls.add_cmd.job.id]) 120 | # cmd = OrthrusStart(args, cls.config) 121 | # cmd.run() 122 | # time.sleep(2*TEST_SLEEP) 123 | # # Stop routine job fuzzing 124 | # args = parse_cmdline(cls.description, ['stop', '-j', cls.add_cmd.job.id]) 125 | # cmd = OrthrusStop(args, cls.config) 126 | # cmd.run() 127 | 128 | args = 
parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd.job.id]) 129 | cmd = OrthrusTriage(args, cls.config, test=True) 130 | cmd.run() 131 | 132 | # Add a/b test job 133 | abconf_dict = {'num_jobs': 2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 'fuzzerB_args': ''} 134 | with open(cls.abconf_file, 'w') as abconf_fp: 135 | json.dump(abconf_dict, abconf_fp, indent=4) 136 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-i=./afl-crash-out.tar.gz', '--jobconf={}'. 137 | format(cls.abconf_file), '--jobtype=abtests']) 138 | cls.add_cmd_abtest = OrthrusAdd(args, cls.config) 139 | cls.add_cmd_abtest.run() 140 | # Start a/b test job 141 | # args = parse_cmdline(cls.description, ['start', '-j', cls.add_cmd_abtest.job.id]) 142 | # cmd = OrthrusStart(args, cls.config) 143 | # cmd.run() 144 | # time.sleep(2 * TEST_SLEEP) 145 | # # Stop a/b test job 146 | # args = parse_cmdline(cls.description, ['stop', '-j', cls.add_cmd_abtest.job.id]) 147 | # cmd = OrthrusStop(args, cls.config) 148 | # cmd.run() 149 | # Simulate old triage 150 | sim_unique_dir = cls.orthrusdirname + '/jobs/abtests/{}/{}/unique'.format(cls.add_cmd_abtest.job.id, 151 | cls.add_cmd_abtest.job.jobids[0]) 152 | if not os.path.isdir(sim_unique_dir): 153 | os.mkdir(sim_unique_dir) 154 | 155 | args = parse_cmdline(cls.description, ['triage', '-j', cls.add_cmd_abtest.job.id]) 156 | cmd = OrthrusTriage(args, cls.config, test=True) 157 | cmd.run() 158 | 159 | ## Expected filenames 160 | cls.gen_dice = '{}/crash-analysis/spectrum/asan/dice/ASAN:SESSION000:id:000000,sig:06,src:000000,op:havoc,rep:2.json' \ 161 | .format(cls.add_cmd.job.rootdir) 162 | cls.exp_dice_single = './expects/asan/spectrum/dice/single/' \ 163 | 'ASAN:SESSION000:id:000000,sig:06,src:000000,op:havoc,rep:2.json' 164 | cls.exp_dice_multiple = './expects/asan/spectrum/dice/multiple/' \ 165 | 'ASAN:SESSION000:id:000000,sig:06,src:000000,op:havoc,rep:2.json' 166 | 167 | cls.gen_slice = '{}/crash-analysis/spectrum/asan/slice/ASAN:SESSION000:id:000000,sig:06,src:000000,op:havoc,rep:2.json' \ 168 | .format(cls.add_cmd.job.rootdir) 169 | cls.exp_slice = './expects/asan/spectrum/slice/ASAN:SESSION000:id:000000,sig:06,src:000000,op:havoc,rep:2.json' 170 | 171 | cls.dice_dir = '{}/crash-analysis/spectrum/asan/dice'.format(cls.add_cmd.job.rootdir) 172 | cls.slice_dir = '{}/crash-analysis/spectrum/asan/slice'.format(cls.add_cmd.job.rootdir) 173 | 174 | @classmethod 175 | def tearDownClass(cls): 176 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_orthrus_start.py: -------------------------------------------------------------------------------- 1 | import time 2 | import unittest 3 | from orthrus.commands import * 4 | from orthrusutils.orthrusutils import * 5 | 6 | class TestOrthrusStart(unittest.TestCase): 7 | 8 | description = 'Test harness' 9 | orthrusdirname = '.orthrus' 10 | config = {'orthrus': {'directory': orthrusdirname}} 11 | abconf_file = orthrusdirname + '/conf/abconf.conf' 12 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 13 | 14 | def test_start(self): 15 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd.job.id]) 16 | cmd = OrthrusStart(args, self.config) 17 | self.assertTrue(cmd.run()) 18 | 19 | def test_resume_and_minimize(self): 20 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd.job.id]) 21 | cmd = OrthrusStart(args, self.config) 22 | self.assertTrue(cmd.run()) 23 | time.sleep(TEST_SLEEP) 
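# Presumed flow of this test: start the job, fuzz briefly, stop it, then
# restart with '-m' so Orthrus minimizes the synced corpus before resuming.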
24 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd.job.id]) 25 | cmd = OrthrusStop(args, self.config, True) 26 | self.assertTrue(cmd.run()) 27 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd.job.id, '-m']) 28 | cmd = OrthrusStart(args, self.config) 29 | self.assertTrue(cmd.run()) 30 | 31 | def test_start_coverage(self): 32 | self.is_coverage = True 33 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd.job.id, '-c']) 34 | cmd = OrthrusStart(args, self.config, True) 35 | self.assertTrue(cmd.run()) 36 | 37 | def test_start_abtest(self): 38 | self.is_abtest = True 39 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd_abtest.job.id]) 40 | cmd = OrthrusStart(args, self.config) 41 | self.assertTrue(cmd.run()) 42 | 43 | def test_resume_and_minimize_abtest(self): 44 | self.is_abtest = True 45 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd_abtest.job.id]) 46 | cmd = OrthrusStart(args, self.config) 47 | self.assertTrue(cmd.run()) 48 | time.sleep(TEST_SLEEP) 49 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd_abtest.job.id]) 50 | cmd = OrthrusStop(args, self.config, True) 51 | self.assertTrue(cmd.run()) 52 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd_abtest.job.id, '-m']) 53 | cmd = OrthrusStart(args, self.config) 54 | self.assertTrue(cmd.run()) 55 | 56 | def test_start_coverage_abtest(self): 57 | self.is_coverage = True 58 | self.is_abtest = True 59 | args = parse_cmdline(self.description, ['start', '-j', self.add_cmd_abtest.job.id, '-c']) 60 | cmd = OrthrusStart(args, self.config) 61 | self.assertTrue(cmd.run()) 62 | 63 | def setUp(self): 64 | self.is_coverage = False 65 | self.is_abtest = False 66 | 67 | def tearDown(self): 68 | if not self.is_coverage: 69 | if self.is_abtest: 70 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd_abtest.job.id]) 71 | else: 72 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd.job.id]) 73 | else: 74 | # Sleep until afl-cov records its pid in afl-cov-status file and then stop 75 | time.sleep(TEST_SLEEP) 76 | if self.is_abtest: 77 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd_abtest.job.id, '-c']) 78 | else: 79 | args = parse_cmdline(self.description, ['stop', '-j', self.add_cmd.job.id, '-c']) 80 | cmd = OrthrusStop(args, self.config, True) 81 | self.assertTrue(cmd.run()) 82 | 83 | @classmethod 84 | def setUpClass(cls): 85 | args = parse_cmdline(cls.description, ['create', '-asan', '-fuzz', '-cov']) 86 | cmd = OrthrusCreate(args, cls.config) 87 | cmd.run() 88 | 89 | # Add routine job 90 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 91 | with open(cls.routineconf_file, 'w') as routineconf_fp: 92 | json.dump(routineconf_dict, routineconf_fp, indent=4) 93 | 94 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds/dummy_seed0', '--jobtype=routine', 95 | '--jobconf={}'.format(cls.routineconf_file)]) 96 | cls.add_cmd = OrthrusAdd(args, cls.config) 97 | cls.add_cmd.run() 98 | 99 | ## abtest 100 | abconf_dict = {'num_jobs': 2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 101 | 'fuzzerB_args': '-p coe'} 102 | with open(cls.abconf_file, 'w') as abconf_fp: 103 | json.dump(abconf_dict, abconf_fp, indent=4) 104 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds', '--jobconf={}'.format(cls.abconf_file), 105 | '--jobtype=abtests']) 106 | cls.add_cmd_abtest = OrthrusAdd(args, cls.config) 
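# For reference, the abconf file written in setUpClass serializes to roughly:
# {
#     "num_jobs": 2,
#     "fuzzerA": "afl-fuzz", "fuzzerA_args": "",
#     "fuzzerB": "afl-fuzz-fast", "fuzzerB_args": "-p coe"
# }
# num_jobs presumably controls how many fuzzer instances the a/b test spawns.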
107 | cls.add_cmd_abtest.run() 108 | 109 | @classmethod 110 | def tearDownClass(cls): 111 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_orthrus_triage.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrus.commands import * 3 | from orthrusutils.orthrusutils import * 4 | 5 | class TestOrthrusTriage(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | config = {'orthrus': {'directory': orthrusdirname}} 10 | abconf_file = orthrusdirname + '/conf/abconf.conf' 11 | routineconf_file = orthrusdirname + '/conf/routineconf.conf' 12 | 13 | def test_triage(self): 14 | args = parse_cmdline(self.description, ['triage', '-j', self.add_cmd.job.id]) 15 | cmd = OrthrusTriage(args, self.config, test=True) 16 | self.assertTrue(cmd.run()) 17 | 18 | def test_triage_abtest(self): 19 | args = parse_cmdline(self.description, ['triage', '-j', self.add_cmd_abtest.job.id]) 20 | cmd = OrthrusTriage(args, self.config, test=True) 21 | self.assertTrue(cmd.run()) 22 | 23 | @classmethod 24 | def setUpClass(cls): 25 | # Create 26 | args = parse_cmdline(cls.description, ['create', '-asan']) 27 | cmd = OrthrusCreate(args, cls.config) 28 | cmd.run() 29 | # Add routine job 30 | routineconf_dict = {'fuzzer': 'afl-fuzz', 'fuzzer_args': ''} 31 | with open(cls.routineconf_file, 'w') as routineconf_fp: 32 | json.dump(routineconf_dict, routineconf_fp, indent=4) 33 | 34 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds', '--jobtype=routine', 35 | '--jobconf={}'.format(cls.routineconf_file)]) 36 | cls.add_cmd = OrthrusAdd(args, cls.config) 37 | cls.add_cmd.run() 38 | # Start routine job fuzzing 39 | args = parse_cmdline(cls.description, ['start', '-j', cls.add_cmd.job.id]) 40 | cmd = OrthrusStart(args, cls.config) 41 | cmd.run() 42 | time.sleep(2*TEST_SLEEP) 43 | # Stop routine job fuzzing 44 | args = parse_cmdline(cls.description, ['stop', '-j', cls.add_cmd.job.id]) 45 | cmd = OrthrusStop(args, cls.config, True) 46 | cmd.run() 47 | # Add a/b test job 48 | abconf_dict = {'num_jobs': 2, 'fuzzerA': 'afl-fuzz', 'fuzzerA_args': '', 'fuzzerB': 'afl-fuzz-fast', 49 | 'fuzzerB_args': ''} 50 | with open(cls.abconf_file, 'w') as abconf_fp: 51 | json.dump(abconf_dict, abconf_fp, indent=4) 52 | args = parse_cmdline(cls.description, ['add', '--job=main @@', '-s=./seeds', '--jobconf={}'. 
53 | format(cls.abconf_file), '--jobtype=abtests']) 54 | cls.add_cmd_abtest = OrthrusAdd(args, cls.config) 55 | cls.add_cmd_abtest.run() 56 | # Start a/b test job 57 | args = parse_cmdline(cls.description, ['start', '-j', cls.add_cmd_abtest.job.id]) 58 | cmd = OrthrusStart(args, cls.config) 59 | cmd.run() 60 | time.sleep(2 * TEST_SLEEP) 61 | # Stop a/b test job 62 | args = parse_cmdline(cls.description, ['stop', '-j', cls.add_cmd_abtest.job.id]) 63 | cmd = OrthrusStop(args, cls.config, True) 64 | cmd.run() 65 | # Simulate old triage unique and exploitable dirs 66 | sim_dirs = [] 67 | for id in cls.add_cmd_abtest.job.jobids: 68 | sim_dirs.append(cls.orthrusdirname + '/jobs/abtests/{}/{}/unique'.format(cls.add_cmd_abtest.job.id, id)) 69 | sim_dirs.append(cls.orthrusdirname + '/jobs/abtests/{}/{}/exploitable'.format(cls.add_cmd_abtest.job.id, id)) 70 | 71 | for dir in sim_dirs: 72 | os.makedirs(dir) 73 | 74 | @classmethod 75 | def tearDownClass(cls): 76 | shutil.rmtree(cls.orthrusdirname) -------------------------------------------------------------------------------- /tests/test_validation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from orthrusutils.orthrusutils import * 3 | from orthrus.commands import * 4 | 5 | class TestOrthrusValidate(unittest.TestCase): 6 | 7 | description = 'Test harness' 8 | orthrusdirname = '.orthrus' 9 | 10 | def test_validate(self): 11 | args = parse_cmdline(self.description, ['validate']) 12 | cmd = OrthrusValidate(args, self.config_pass) 13 | self.assertTrue(cmd.run()) 14 | 15 | def test_validate_false(self): 16 | args = parse_cmdline(self.description, ['validate']) 17 | cmd = OrthrusValidate(args, self.config_fail) 18 | self.assertFalse(cmd.run()) 19 | 20 | def setUp(self): 21 | # self.config_pass = {'dependencies': [('clang', 'on'), ('gcc', 'on'), ('afl-fuzz', 'on'), 22 | # ('afl-clang', 'on'), ('afl-clang++', 'on'), 23 | # ('afl-collect', 'on'), ('afl-minimize', 'on'), 24 | # ('afl-multicore', 'on'), ('gdb', 'on'), ('afl-cov', 'on'), 25 | # ('lcov', 'on'), ('genhtml', 'on')]} 26 | self.config_pass = parse_config('../../conf/orthrus.conf') 27 | 28 | self.config_fail = {'dependencies': [('joombaloomba', 'on')]} -------------------------------------------------------------------------------- /tool/orthrus: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | 3 | import orthrus 4 | from orthrus.commands import * 5 | from orthrusutils.orthrusutils import * 6 | 7 | DESCRIPTION = bcolors.OKBLUE + "Orthrus {}".format(orthrus.__version__) + bcolors.ENDC + " by {} <{}> " \ 8 | .format(orthrus.__author_name__, orthrus.__author_email__) 9 | 10 | class OrthrusTool(): 11 | def __init__(self, description, args): 12 | self._description = description 13 | self._args = parse_cmdline(description, args, self._create, 14 | self._add, self._remove, 15 | self._start, self._stop, self._show, 16 | self._triage, self._coverage, self._spectrum, 17 | self._runtime, self._destroy, self._validate) 18 | self._config = parse_config() 19 | 20 | def _create(self, args): 21 | cmd = OrthrusCreate(args, self._config) 22 | cmd.run() 23 | 24 | def _add(self, args): 25 | cmd = OrthrusAdd(args, self._config) 26 | cmd.run() 27 | 28 | def _remove(self, args): 29 | cmd = OrthrusRemove(args, self._config) 30 | cmd.run() 31 | 32 | def _start(self, args): 33 | cmd = OrthrusStart(args, self._config) 34 | cmd.run() 35 | 36 | def _stop(self, args): 37 | cmd = OrthrusStop(args, 
self._config) 38 | cmd.run() 39 | 40 | def _show(self, args): 41 | cmd = OrthrusShow(args, self._config) 42 | cmd.run() 43 | 44 | def _triage(self, args): 45 | cmd = OrthrusTriage(args, self._config) 46 | cmd.run() 47 | 48 | def _coverage(self, args): 49 | cmd = OrthrusCoverage(args, self._config) 50 | cmd.run() 51 | 52 | def _spectrum(self, args): 53 | cmd = OrthrusSpectrum(args, self._config) 54 | cmd.run() 55 | 56 | def _runtime(self, args): 57 | cmd = OrthrusRuntime(args, self._config) 58 | cmd.run() 59 | 60 | def _destroy(self, args): 61 | cmd = OrthrusDestroy(args, self._config) 62 | cmd.run() 63 | 64 | def _validate(self, args): 65 | cmd = OrthrusValidate(args, self._config) 66 | cmd.run() 67 | 68 | def run(self): 69 | sys.stdout.write(self._description + "\n\n") 70 | 71 | return self._args.func(self._args) 72 | 73 | if __name__ == '__main__': 74 | tool = OrthrusTool(DESCRIPTION, sys.argv[1:]) 75 | tool.run() -------------------------------------------------------------------------------- /triagetool/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/test-pipeline/orthrus/7e916f36ceffcc4fdd9013a4d952649f69738aa8/triagetool/__init__.py --------------------------------------------------------------------------------