├── .bumpversion.cfg
├── .github
└── workflows
│ └── codeql-analysis.yml
├── .gitignore
├── .travis.yml
├── LICENSE
├── MANIFEST.in
├── README.md
├── eth_vertigo
├── __init__.py
├── cli
│ ├── __init__.py
│ └── main.py
├── core
│ ├── __init__.py
│ ├── campaign.py
│ ├── filter.py
│ ├── filters
│ │ ├── __init__.py
│ │ ├── exclude_filter.py
│ │ └── sample_filter.py
│ ├── mutation.py
│ ├── network
│ │ ├── __init__.py
│ │ ├── ganache.py
│ │ └── pool.py
│ ├── suggest.py
│ └── templates
│ │ ├── mutation_template.jinja2
│ │ └── report_template.jinja2
├── incremental
│ ├── __init__.py
│ ├── record.py
│ ├── store.py
│ └── suggester.py
├── interfaces
│ ├── __init__.py
│ ├── common
│ │ ├── __init__.py
│ │ └── tester.py
│ ├── generics.py
│ ├── hardhat
│ │ ├── __init__.py
│ │ ├── compile.py
│ │ ├── core.py
│ │ ├── mutator.py
│ │ └── tester.py
│ └── truffle
│ │ ├── __init__.py
│ │ ├── compiler.py
│ │ ├── core.py
│ │ ├── mutator.py
│ │ └── tester.py
├── mutator
│ ├── __init__.py
│ ├── mutation_configuration.py
│ ├── mutator.py
│ ├── solidity
│ │ ├── __init__.py
│ │ ├── solidity_file.py
│ │ └── solidity_mutator.py
│ ├── source_file.py
│ ├── source_mutator.py
│ └── universal_mutator
│ │ ├── __init__.py
│ │ ├── rule.py
│ │ └── rule_loader.py
└── test_runner
│ ├── __init__.py
│ ├── exceptions.py
│ ├── file_editor.py
│ ├── runner.py
│ └── test_result.py
├── requirements.txt
├── setup.py
├── test
├── mutation
│ ├── __init__.py
│ ├── test_mutation.py
│ └── test_mutation_filter.py
└── test_runner
│ ├── __init__.py
│ ├── test_exceptions.py
│ ├── test_file_editor.py
│ ├── test_result_test.py
│ ├── test_runner.py
│ └── truffle
│ ├── __init__.py
│ └── test_truffle_runner.py
└── vertigo.py
/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 1.3.0
3 | commit = True
4 | tag = True
5 |
6 | [bumpversion:file:setup.py]
7 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | name: "CodeQL"
7 |
8 | on:
9 | push:
10 | branches: [master]
11 | pull_request:
12 | # The branches below must be a subset of the branches above
13 | branches: [master]
14 | schedule:
15 | - cron: '0 3 * * 4'
16 |
17 | jobs:
18 | analyze:
19 | name: Analyze
20 | runs-on: ubuntu-latest
21 |
22 | strategy:
23 | fail-fast: false
24 | matrix:
25 | # Override automatic language detection by changing the below list
26 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
27 | language: ['python']
28 | # Learn more...
29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection
30 |
31 | steps:
32 | - name: Checkout repository
33 | uses: actions/checkout@v2
34 | with:
35 | # We must fetch at least the immediate parents so that if this is
36 | # a pull request then we can checkout the head.
37 | fetch-depth: 2
38 |
39 | # If this run was triggered by a pull request event, then checkout
40 | # the head of the pull request instead of the merge commit.
41 | - run: git checkout HEAD^2
42 | if: ${{ github.event_name == 'pull_request' }}
43 |
44 | # Initializes the CodeQL tools for scanning.
45 | - name: Initialize CodeQL
46 | uses: github/codeql-action/init@v1
47 | with:
48 | languages: ${{ matrix.language }}
49 | # If you wish to specify custom queries, you can do so here or in a config file.
50 | # By default, queries listed here will override any specified in a config file.
51 | # Prefix the list here with "+" to use these queries and those in the config file.
52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
53 |
54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
55 | # If this step fails, then you should remove it and run the build manually (see below)
56 | - name: Autobuild
57 | uses: github/codeql-action/autobuild@v1
58 |
59 | # ℹ️ Command-line programs to run using the OS shell.
60 | # 📚 https://git.io/JvXDl
61 |
62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
63 | # and modify them (or add more) to build your code if your project
64 | # uses a compiled language
65 |
66 | #- run: |
67 | # make bootstrap
68 | # make release
69 |
70 | - name: Perform CodeQL Analysis
71 | uses: github/codeql-action/analyze@v1
72 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/python,pycharm
3 | # Edit at https://www.gitignore.io/?templates=python,pycharm
4 |
5 | ### PyCharm ###
6 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
7 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
8 |
9 | # User-specific stuff
10 | .idea/**/workspace.xml
11 | .idea/**/tasks.xml
12 | .idea/**/usage.statistics.xml
13 | .idea/**/dictionaries
14 | .idea/**/shelf
15 |
16 | # Generated files
17 | .idea/**/contentModel.xml
18 |
19 | # Sensitive or high-churn files
20 | .idea/**/dataSources/
21 | .idea/**/dataSources.ids
22 | .idea/**/dataSources.local.xml
23 | .idea/**/sqlDataSources.xml
24 | .idea/**/dynamic.xml
25 | .idea/**/uiDesigner.xml
26 | .idea/**/dbnavigator.xml
27 |
28 | # Gradle
29 | .idea/**/gradle.xml
30 | .idea/**/libraries
31 |
32 | # Gradle and Maven with auto-import
33 | # When using Gradle or Maven with auto-import, you should exclude module files,
34 | # since they will be recreated, and may cause churn. Uncomment if using
35 | # auto-import.
36 | # .idea/modules.xml
37 | # .idea/*.iml
38 | # .idea/modules
39 | # *.iml
40 | # *.ipr
41 |
42 | # CMake
43 | cmake-build-*/
44 |
45 | # Mongo Explorer plugin
46 | .idea/**/mongoSettings.xml
47 |
48 | # File-based project format
49 | *.iws
50 |
51 | # IntelliJ
52 | out/
53 |
54 | # mpeltonen/sbt-idea plugin
55 | .idea_modules/
56 |
57 | # JIRA plugin
58 | atlassian-ide-plugin.xml
59 |
60 | # Cursive Clojure plugin
61 | .idea/replstate.xml
62 |
63 | # Crashlytics plugin (for Android Studio and IntelliJ)
64 | com_crashlytics_export_strings.xml
65 | crashlytics.properties
66 | crashlytics-build.properties
67 | fabric.properties
68 |
69 | # Editor-based Rest Client
70 | .idea/httpRequests
71 |
72 | # Android studio 3.1+ serialized cache file
73 | .idea/caches/build_file_checksums.ser
74 |
75 | ### PyCharm Patch ###
76 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
77 |
78 | # *.iml
79 | # modules.xml
80 | # .idea/misc.xml
81 | # *.ipr
82 |
83 | # Sonarlint plugin
84 | .idea/sonarlint
85 |
86 | ### Python ###
87 | # Byte-compiled / optimized / DLL files
88 | __pycache__/
89 | *.py[cod]
90 | *$py.class
91 |
92 | # C extensions
93 | *.so
94 |
95 | # Distribution / packaging
96 | .Python
97 | build/
98 | develop-eggs/
99 | dist/
100 | downloads/
101 | eggs/
102 | .eggs/
103 | lib/
104 | lib64/
105 | parts/
106 | sdist/
107 | var/
108 | wheels/
109 | pip-wheel-metadata/
110 | share/python-wheels/
111 | *.egg-info/
112 | .installed.cfg
113 | *.egg
114 | MANIFEST
115 |
116 | # PyInstaller
117 | # Usually these files are written by a python script from a template
118 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
119 | *.manifest
120 | *.spec
121 |
122 | # Installer logs
123 | pip-log.txt
124 | pip-delete-this-directory.txt
125 |
126 | # Unit test / coverage reports
127 | htmlcov/
128 | .tox/
129 | .nox/
130 | .coverage
131 | .coverage.*
132 | .cache
133 | nosetests.xml
134 | coverage.xml
135 | *.cover
136 | .hypothesis/
137 | .pytest_cache/
138 |
139 | # Translations
140 | *.mo
141 | *.pot
142 |
143 | # Django stuff:
144 | *.log
145 | local_settings.py
146 | db.sqlite3
147 | db.sqlite3-journal
148 |
149 | # Flask stuff:
150 | instance/
151 | .webassets-cache
152 |
153 | # Scrapy stuff:
154 | .scrapy
155 |
156 | # Sphinx documentation
157 | docs/_build/
158 |
159 | # PyBuilder
160 | target/
161 |
162 | # Jupyter Notebook
163 | .ipynb_checkpoints
164 |
165 | # IPython
166 | profile_default/
167 | ipython_config.py
168 |
169 | # pyenv
170 | .python-version
171 |
172 | # pipenv
173 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
174 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
175 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
176 | # install all needed dependencies.
177 | #Pipfile.lock
178 |
179 | # celery beat schedule file
180 | celerybeat-schedule
181 |
182 | # SageMath parsed files
183 | *.sage.py
184 |
185 | # Environments
186 | .env
187 | .venv
188 | env/
189 | venv/
190 | ENV/
191 | env.bak/
192 | venv.bak/
193 |
194 | # Spyder project settings
195 | .spyderproject
196 | .spyproject
197 |
198 | # Rope project settings
199 | .ropeproject
200 |
201 | # mkdocs documentation
202 | /site
203 |
204 | # mypy
205 | .mypy_cache/
206 | .dmypy.json
207 | dmypy.json
208 |
209 | # Pyre type checker
210 | .pyre/
211 |
212 | # End of https://www.gitignore.io/api/python,pycharm
213 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - '3.6'
4 | install:
5 | - pip install -r requirements.txt
6 | script:
7 | - pytest
8 | deploy:
9 | provider: pypi
10 | user: joranhonig
11 | password:
12 | secure: VQG9IxgTBrWjjQWnfpe1P3v9Sz9Yd+K/kY8snFSLeGw2GStcbvBb8CGShqRG8bYemfw8gEcO2oHR/yGcFFk6PE6xWSxb1Knmn/pLdwM1IO3AfL2WBvznuK6vn0jWwZYlayRyWdTmDTXZ428Q1d/t1ilNMvNKmf5EZ2jUHGi+Pca1t/Q0AH/DIQbBFGb8PnmGNHkmzcAisTHoHy6VVGk/j/F93unOH7zA5nLtKcUxA83ncRd9cwSNyi/upHBgnzKBnvxrUyqikwZ3QD1ULl/s5tGrUDQSNJjHNvH7RVSLYYk8TVBaNNRWEnaXK3HCMC5eh+Bnf5F7H989fLxGfN12p/wyR2XyroLr4MTET/qR7N6CkXedwbXf6n1UQargspg8YyzsLhW8CAh07uVacZtW1lhJMv1LXYUjgtJ9necvWw308r0eLdZZb8aSxXy6Bjg75mYR0ZEpIwrsmBuTbMzJwJVeDzOKYLSYgQnT2CKhdK7S5N7eZdwgWwb6S1ZVQfN8Keci4R29R6ZEUmWXs5bHGxpzvG8x4IoBnW8WD9cqmCYRsZlg6n5g/+jA2nOFBIf7wCBlw1wSCmL2xmIZuOGuJnXgSdEjc4SVQcyBaH02niGtbYmzYRj9izAgbpzl9F5dIiEaovnQ8/jjzTMyuAUbFerrD7AFAC/wrOY/lNccnhs=
13 | on:
14 | tags: true
15 | python: 3.6
16 | twine_version: latest
17 | distributions: sdist bdist_wheel
18 | skip_cleanup: true
19 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include eth_vertigo/core/templates/*
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # vertigo
2 | [](https://travis-ci.org/JoranHonig/vertigo)
3 | [](https://gitter.im/eth-vertigo/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
4 |
5 | Vertigo is a mutation testing framework designed to work specifically for smart contracts.
6 | This mutation testing framework implements a range of mutation operators that are either selected from previous works or tailored to solidity.
7 |
8 | ### Quick Start Guide
9 |
10 | To install vertigo, execute the following command:
11 | ```bash
12 | pip3 install --user eth-vertigo
13 | ```
14 |
15 | You can now run vertigo on a truffle project with the following command (assuming you have a `development` network configured in your `truffle-config.js`):
16 |
17 | ```bash
18 | vertigo run --network development
19 | ```
20 | Depending on your environment it might be required to specify the location of the truffle executable:
21 | ```bash
22 | vertigo run --network development --truffle-location /bin/truffle
23 | ```
24 |
25 | Or, if you're using Hardhat, just use dynamic networks:
26 | ```bash
27 | vertigo run --hardhat-parallel 8
28 | ```
29 |
30 | There are a few additional parameters available that allow you to tweak the execution of vertigo:
31 | ```bash
32 | $ vertigo run --help
33 | Usage: vertigo run [OPTIONS]
34 |
35 | Performs a core test campaign
36 |
37 | Options:
38 | --output TEXT Output core test results to file
39 | --network TEXT Network names that vertigo can use
40 | --ganache-path TEXT Path to ganache binary
41 | --ganache-network ...
42 | Dynamic networks that vertigo can use eg.
43 | (develop, 8485)
44 |
45 | --ganache-network-options TEXT Options to pass to dynamic ganache networks
46 | --hardhat-parallel INTEGER Amount of networks that hardhat should be
47 | using in parallel
48 |
49 | --rules TEXT Universal Mutator style rules to use in
50 | mutation testing
51 |
52 | --truffle-location TEXT Location of truffle cli
53 | --sample-ratio FLOAT If this option is set. Vertigo will apply
54 | the sample filter with the given ratio
55 |
56 | --exclude TEXT Vertigo won't mutate files in these
57 | directories
58 |
59 | --incremental TEXT File where incremental mutation state is
60 | stored
61 |
62 | --help Show this message and exit.
63 |
64 | ```
65 |
66 | ### Known Issues
67 |
68 | **Ganache** is generally used only for a single run of the entire test suite.
69 | For the general use case, it does not matter if Ganache creates a few thousand files.
70 | Unfortunately, once you start executing the entire test suite hundreds of times, you can end up with millions of files, and your machine could run out of free inodes.
71 | You can check whether this happens to you by running:
72 | ```
73 | df -i
74 | ```
75 |
76 | This issue ([#1](https://github.com/JoranHonig/vertigo/issues/1)) is known, and we're working on a fix.
77 |
78 | In the meantime, if your test suite is large enough to consume all your inodes, there are two options:
79 | - You can use the command line option `--sample-ratio` to select a random subsample of the mutations (reducing the number of times that the test suite is run)
80 | - You can create a partition that has a sufficient amount of inodes available
81 |
82 | ### Publications and Articles
83 | [Practical Mutation Testing for Smart Contracts](https://link.springer.com/chapter/10.1007/978-3-030-31500-9_19) - Joran J. Honig, Maarten H. Everts, Marieke Huisman
84 |
85 | [Introduction into Mutation Testing](https://medium.com/swlh/introduction-into-mutation-testing-d6512dc702b0?source=friends_link&sk=2878e0c08b6301a125198a264e43edb4) - Joran Honig
86 |
87 | [Mutation Testing for Smart Contracts - A step by step guide](https://medium.com/@joran.honig/mutation-testing-for-smart-contracts-a-step-by-step-guide-68c838ca2094) - Joran Honig
88 |
89 | If you want to cite vertigo, please use the following:
90 | ```
91 | @InProceedings{10.1007/978-3-030-31500-9_19,
92 | author="Honig, Joran J.
93 | and Everts, Maarten H.
94 | and Huisman, Marieke",
95 | title="Practical Mutation Testing for Smart Contracts",
96 | booktitle="Data Privacy Management, Cryptocurrencies and Blockchain Technology",
97 | year="2019",
98 | publisher="Springer International Publishing",
99 | pages="289--303"
100 | }
101 | ```
102 |
--------------------------------------------------------------------------------
/eth_vertigo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/eth_vertigo/__init__.py
--------------------------------------------------------------------------------
/eth_vertigo/cli/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/eth_vertigo/cli/__init__.py
--------------------------------------------------------------------------------
/eth_vertigo/cli/main.py:
--------------------------------------------------------------------------------
1 | import click
2 | from os import getcwd
3 | from pathlib import Path
4 | from eth_vertigo.core import MutationResult
5 | from eth_vertigo.core.network import DynamicNetworkPool, StaticNetworkPool, Ganache
6 | from eth_vertigo.interfaces.truffle import TruffleCampaign
7 | from eth_vertigo.interfaces.hardhat import HardhatCampaign
8 | from eth_vertigo.core.filters.sample_filter import SampleFilter
9 | from eth_vertigo.core.filters.exclude_filter import ExcludeFilter
10 | from eth_vertigo.test_runner.exceptions import TestRunException
11 | from eth_vertigo.mutator.universal_mutator import UniversalMutator
12 |
13 | from eth_vertigo.incremental import IncrementalRecorder, IncrementalMutationStore, IncrementalSuggester
14 |
15 | from tqdm import tqdm
16 |
17 |
@click.group(help="Mutation testing framework for smart contracts")
def cli():
    # Root command group; subcommands (e.g. `run` below) attach themselves
    # via @cli.command. The body is intentionally empty — click dispatches.
    pass
21 |
22 |
@cli.command(help="Performs a core test campaign")
@click.option('--output', help="Output core test results to file", nargs=1, type=str)
@click.option('--network', help="Network names that vertigo can use", multiple=True)
@click.option('--ganache-path', help="Path to ganache binary", type=str, default="ganache-cli")
@click.option('--ganache-network', help="Dynamic networks that vertigo can use eg. (develop, 8485)",
              multiple=True, type=(str, int))
@click.option('--ganache-network-options', help="Options to pass to dynamic ganache networks", type=str)
@click.option('--hardhat-parallel', help="Amount of networks that hardhat should be using in parallel", type=int)
@click.option('--rules', help="Universal Mutator style rules to use in mutation testing", multiple=True)
@click.option('--truffle-location', help="Location of truffle cli", nargs=1, type=str, default="truffle")
@click.option('--sample-ratio', help="If this option is set. Vertigo will apply the sample filter with the given ratio", nargs=1, type=float)
@click.option('--exclude', help="Vertigo won't mutate files in these directories", multiple=True)
@click.option('--incremental', help="File where incremental mutation state is stored",
              type=str)
def run(
        output,
        network,
        ganache_path,
        ganache_network,
        ganache_network_options,
        hardhat_parallel,
        rules,
        truffle_location,
        sample_ratio,
        exclude,
        incremental
):
    """Run a mutation testing campaign in the current working directory.

    Detects whether the cwd is a Truffle or Hardhat project, builds the
    requested network pool, validates the project, runs the campaign with a
    progress bar, prints the report and surviving mutants, and optionally
    writes the report and incremental mutation state to disk.
    """
    click.echo("[*] Starting mutation testing")

    # Setup global parameters
    working_directory = getcwd()
    project_type = _directory_type(working_directory)
    filters = []
    if exclude:
        filters.append(ExcludeFilter(exclude))

    # Static (--network) and dynamic ganache (--ganache-network) pools are
    # mutually exclusive: the campaign draws from exactly one pool type.
    if network and ganache_network:
        click.echo("[-] Can't use dynamic networks and regular networks simultaneously")
        exit(1)

    test_suggesters = []
    if incremental:
        incremental_store_file = Path(incremental)
        if not incremental_store_file.exists():
            # First run: nothing to load yet; the store is written at the end.
            pass
        elif not incremental_store_file.is_file():
            click.echo(f"Incremental file {incremental} is not a file")
            # Usage error — exit with a non-zero status (was exit(0), which
            # incorrectly reported success to the caller).
            exit(1)
        else:
            store = IncrementalMutationStore.from_file(incremental_store_file)
            test_suggesters.append(IncrementalSuggester(store))

    click.echo("[*] Starting analysis on project")
    project_path = Path(working_directory)

    if project_type == "truffle":
        # Warn (but don't abort) on a project layout that is unlikely to work.
        if not (project_path / "contracts").exists():
            click.echo("[-] No contracts directory in project")
        elif not (project_path / "test").exists():
            click.echo("[-] No test directory found in project")

    if project_type:
        if sample_ratio:
            filters.append(SampleFilter(sample_ratio))

        mutators = []
        if rules:
            um = UniversalMutator()
            for rule_file in rules:
                um.load_rule(Path(rule_file))
            mutators.append(um)

        network_pool = None
        if hardhat_parallel:
            if project_type != "hardhat":
                click.echo("[+] Not running analysis on hardhat project, ignoring hardhat parallel option")
            else:
                # Hardhat manages its own nodes; the pool only counts
                # parallel slots, so the network names are never used.
                network_pool = StaticNetworkPool(["_not_used_anywhere_"] * hardhat_parallel)

        if network_pool and (network or ganache_network):
            click.echo("[*] Both a hardhat network pool is set up and custom networks. Only using hardhat networks")
        elif network:
            network_pool = StaticNetworkPool(network)
        elif ganache_network:
            network_pool = DynamicNetworkPool(
                ganache_network,
                lambda port: Ganache(port, ganache_network_options.split(' ') if ganache_network_options else [],
                                     ganache_path)
            )

        if not network_pool:
            click.echo("[-] Vertigo needs at least one network to run analyses on")
            return

        try:
            if project_type == "truffle":
                campaign = TruffleCampaign(
                    truffle_location=truffle_location,
                    project_directory=project_path,
                    mutators=mutators,
                    network_pool=network_pool,
                    filters=filters,
                    suggesters=test_suggesters,
                )
            if project_type == "hardhat":
                campaign = HardhatCampaign(
                    hardhat_command=["npx", "hardhat"],
                    project_directory=project_path,
                    mutators=mutators,
                    network_pool=network_pool,
                    filters=filters,
                    suggesters=test_suggesters,
                )
        except BaseException:
            # Deliberately broad (includes KeyboardInterrupt): any ganache
            # nodes already claimed must be shut down before re-raising.
            click.echo("[-] Encountered an error while setting up the core campaign")
            if isinstance(network_pool, DynamicNetworkPool):
                networks = network_pool.claimed_networks.keys()
            else:
                networks = network_pool.claimed_networks[:]
            for node in networks:
                click.echo(f"[+] Cleaning up network: {node}")
                network_pool.yield_network(node)
            raise
    else:
        click.echo("[*] Could not find supported project directory in {}".format(working_directory))
        return

    click.echo("[*] Initializing campaign run ")

    try:
        campaign.setup()
        click.echo("[*] Checking validity of project")
        if not campaign.valid():
            click.echo("[-] We couldn't get valid results by running the truffle tests.\n Aborting")
            return

        click.echo("[+] The project is valid")
        click.echo("[*] Storing compilation results")
        campaign.store_compilation_results()
        click.echo("[*] Running analysis on {} mutants".format(len(campaign.mutations)))
        with tqdm(total=len(campaign.mutations), unit="mutant") as pbar:
            # Tuple expression ensures refresh() always runs; the previous
            # `update(1) and refresh()` skipped refresh whenever update()
            # returned a falsy value.
            report = campaign.run(lambda: (pbar.update(1), pbar.refresh()), threads=max(network_pool.size, 1))
        pbar.close()

    except TestRunException as e:
        click.echo("[-] Encountered an error while running the framework's test command:")
        click.echo(e)
        return
    except Exception as e:
        click.echo("[-] Encountered an error while running the core campaign")
        click.echo(e)
        raise

    click.echo("[*] Done with campaign run")
    click.echo("[+] Report:")
    click.echo(report.render())

    click.echo("[+] Survivors")
    for mutation in report.mutations:
        if mutation.result == MutationResult.LIVED:
            click.echo(str(mutation))

    if output:
        output_path = Path(output)
        # Only overwrite an existing file after explicit confirmation.
        if not output_path.exists() or click.confirm("[*] There already exists something at {}. Overwrite ".format(str(output_path))):
            click.echo("Result of core run can be found at: {}".format(output))
            output_path.write_text(report.render(with_mutations=True), "utf-8")

    if incremental:
        incremental_store_file = Path(incremental)
        if not incremental_store_file.exists() or \
                click.confirm(f"[*] There already exists an incremental at {incremental_store_file.name}. Overwrite "):
            new_incremental_store = IncrementalRecorder().record(report.mutations)
            new_incremental_store.to_file(incremental_store_file)

    click.echo("[*] Done! ")
201 |
202 |
203 | def _directory_type(working_directory: str):
204 | """ Determines the current framework in the current directory """
205 | wd = Path(working_directory)
206 | has_truffle_config = (wd / "truffle.js").exists() or (wd / "truffle-config.js").exists()
207 | has_hardhat_config = (wd / "hardhat.config.js").exists()
208 | if has_truffle_config and not has_hardhat_config:
209 | return "truffle"
210 | if has_hardhat_config and not has_truffle_config:
211 | return "hardhat"
212 | return None
213 |
--------------------------------------------------------------------------------
/eth_vertigo/core/__init__.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.core.mutation import Mutation, MutationResult
2 | from eth_vertigo.core.suggest import TestSuggester
--------------------------------------------------------------------------------
/eth_vertigo/core/campaign.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from abc import abstractmethod, ABC
3 | from concurrent.futures import ThreadPoolExecutor
4 | from pathlib import Path
5 | from time import time
6 | from typing import List, Callable
7 |
8 | from jinja2 import PackageLoader, Environment
9 |
10 | from eth_vertigo.core import Mutation, MutationResult
11 | from eth_vertigo.core import TestSuggester
12 | from eth_vertigo.core.filter import MutationFilter
13 | from eth_vertigo.core.network import NetworkPool
14 | from eth_vertigo.interfaces.generics import Compiler, Tester
15 | from eth_vertigo.mutator.mutator import Mutator
16 | from eth_vertigo.mutator.solidity.solidity_mutator import SolidityMutator
17 | from eth_vertigo.mutator.source_file import SourceFile
18 | from eth_vertigo.test_runner.exceptions import EquivalentMutant
19 | from eth_vertigo.test_runner.exceptions import TestRunException, TimedOut
20 |
# Jinja2 environment used to render the report templates shipped with the
# eth_vertigo.core package (see core/templates/).
environment = Environment(
    loader=PackageLoader("eth_vertigo.core"), trim_blocks=True
)
24 |
25 |
class CampaignReport:
    """Aggregated view over the mutations of a single campaign run."""

    def __init__(self, mutations):
        """Create a report over the given campaign mutations.

        :param mutations: Mutations for this campaign
        """
        self.mutations = mutations

    @property
    def mutation_count(self):
        """Total number of mutations tracked by this report."""
        return len(self.mutations)

    @property
    def nr_killed(self):
        """Number of mutations that were killed by the test suite."""
        return sum(1 for m in self.mutations if m.result == MutationResult.KILLED)

    @property
    def nr_alive(self):
        """Number of mutations that survived the test suite."""
        return sum(1 for m in self.mutations if m.result == MutationResult.LIVED)

    def render(self, with_mutations=False):
        """Render the report through the packaged jinja2 report template."""
        template = environment.get_template("report_template.jinja2")
        return template.render(
            nr_mutations=self.mutation_count,
            nr_killed=self.nr_killed,
            mutations=self.mutations,
            with_mutations=with_mutations,
        )
57 |
58 |
class Campaign:
    """
    A mutation testing campaign; orchestrates and manages a single run.

    Subclasses implement the framework-specific pieces: setup, validity
    checking, per-mutation testing and compilation-result storage.
    """

    def __init__(self, filters: List[MutationFilter] = None, suggesters=None):
        # `or []` gives every campaign its own fresh list instances
        self.sources = []
        self.project_directory = None
        self.mutations = []
        self.is_set_up = False
        self.filters = filters or []
        self.suggesters = suggesters or []  # type: List[TestSuggester]

    def setup(self):
        """ Sets up the campaign for execution """
        raise NotImplementedError

    def run(self, progress_callback: Callable, threads=1):
        """ Starts a mutation testing campaign

        :param progress_callback: Invoked by test_mutation once per mutation
        :param threads: Number of mutations tested concurrently
        :return: CampaignReport over the (now annotated) mutations
        """
        if not self.is_set_up:
            raise ValueError("This campaign is not setup yet")

        report = CampaignReport(self.mutations)
        # The context manager performs a blocking shutdown on exit, so the
        # previous explicit e.shutdown() call inside the block was redundant.
        # NOTE(review): exceptions raised inside test_mutation are swallowed
        # with the discarded futures; test_mutation must report its own errors.
        with ThreadPoolExecutor(max_workers=threads) as executor:
            for mutation in self.mutations:
                executor.submit(self.test_mutation, mutation, progress_callback)
        return report

    def test_mutation(self, mutation: Mutation, done_callback: Callable):
        """ Run the test suite using a mutation and check whether it is killed """
        raise NotImplementedError

    def valid(self):
        """ Checks whether the current project is valid """
        raise NotImplementedError

    def store_compilation_results(self):
        """ Stores compilation results for trivial compiler equivalence"""
        raise NotImplementedError
99 |
100 |
class BaseCampaign(ABC, Campaign):
    """Framework-agnostic campaign implementation.

    Handles network-pool bookkeeping, mutation generation/filtering and the
    per-mutation test loop; subclasses only supply source discovery.
    """

    def __init__(
            self,
            project_directory: Path,
            mutators: List[Mutator],
            network_pool: NetworkPool,

            compiler: Compiler,
            tester: Tester,
            source_file_builder: Callable[[Path, str], SourceFile],

            filters=None,
            suggesters=None
    ):
        super().__init__(filters=filters, suggesters=suggesters)

        self.project_directory = project_directory
        self.source_directory = project_directory / "build" / "contracts"

        self.compiler = compiler
        self.tester = tester
        self.source_file_builder = source_file_builder

        self.sources = list(self._get_sources())
        self.base_run_time = None  # seconds for an unmutated run; set by valid()
        self.network_pool = network_pool
        self.bytecodes = {}  # original bytecodes, for trivial compiler equivalence

        self.mutators = mutators
        # Solidity mutations are always generated on top of custom mutators
        self.mutators.append(SolidityMutator())

    @abstractmethod
    def _get_sources(self, dir=None):
        """ Implements basic mutator file discovery """
        pass

    def valid(self):
        """ Checks whether the unmutated project passes its own test suite

        Also records the duration of this baseline run in base_run_time,
        later used to derive the per-mutation timeout.
        """
        begin = time()

        network = None
        try:
            network = self.network_pool.claim()
        except ValueError:
            # No network available; the project cannot be validated
            return False

        try:
            test_result = self.tester.run_tests(network=network)
        finally:
            self.network_pool.yield_network(network)

        self.base_run_time = time() - begin
        if test_result is None:
            return False

        return all(result.success for result in test_result.values())

    def setup(self):
        """ Generates this campaign's mutations and applies the filters """
        for source in self.sources:
            for mutator in self.mutators:
                self.mutations += mutator.mutate(source, self.project_directory)
        for f in self.filters:
            self.mutations = f.apply(self.mutations)
        self.is_set_up = True

    @staticmethod
    def _record_killers(mutation: Mutation, test_result) -> bool:
        """ Mark the mutation as killed when any test in test_result failed

        :return: True iff at least one failing (killing) test was found
        """
        killers = [test for test in test_result.values() if not test.success]
        if killers:
            mutation.result = MutationResult.KILLED
            mutation.crime_scenes = [killer.full_title for killer in killers]
        return bool(killers)

    def test_mutation(self, mutation: Mutation, done_callback: Callable):
        """ Run the test suite against a single mutation and record the verdict """
        mutation.result = MutationResult.LIVED
        try:
            network = self.network_pool.claim()
        except ValueError:
            mutation.result = MutationResult.ERROR
            return

        # Collect non-strict test suggestions; these are tried first
        suggestions = []
        for suggester in self.suggesters:
            if suggester.is_strict:
                # Strict suggestions are not yet implemented
                continue
            suggestions.extend(suggester.suggest_tests(mutation))

        try:
            try:
                # NOTE(review): base_run_time < 1s truncates to timeout=0 — confirm intended
                test_result = self.tester.run_tests(
                    mutation=mutation,
                    timeout=int(self.base_run_time) * 2,
                    network=network,
                    original_bytecode=self.bytecodes,
                    keep_test_names=suggestions if suggestions else None
                )
                if not self._record_killers(mutation, test_result) and suggestions:
                    # The suggested tests didn't kill the mutant;
                    # fall back to the full test suite
                    test_result = self.tester.run_tests(
                        mutation=mutation,
                        timeout=int(self.base_run_time) * 2,
                        network=network,
                        original_bytecode=self.bytecodes,
                    )
                    self._record_killers(mutation, test_result)
            except EquivalentMutant:
                mutation.result = MutationResult.EQUIVALENT
            except TimedOut:
                mutation.result = MutationResult.TIMEDOUT
        except TestRunException as e:
            logging.warning(str(e))
            mutation.result = MutationResult.ERROR
        except Exception as e:
            # Was a bare print(); log instead for consistency with the
            # handler above and to avoid corrupting progress-bar output.
            # The mutation verdict deliberately stays LIVED, as before.
            logging.warning(str(e))
        finally:
            self.network_pool.yield_network(network)
            done_callback()
        return

    def store_compilation_results(self):
        """ Stores compilation results for trivial compiler equivalence"""
        self.bytecodes = self.compiler.get_bytecodes(working_directory=str(self.project_directory))
224 |
--------------------------------------------------------------------------------
/eth_vertigo/core/filter.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from eth_vertigo.core.mutation import Mutation
3 |
4 |
class MutationFilter:
    """Interface for selectively narrowing down a set of generated mutations."""

    def apply(self, mutations: List[Mutation]) -> List[Mutation]:
        """Return the subset of *mutations* that passes this filter.

        :param mutations: The candidate mutations
        :return: The filtered list
        """
        raise NotImplementedError
15 |
--------------------------------------------------------------------------------
/eth_vertigo/core/filters/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/eth_vertigo/core/filters/__init__.py
--------------------------------------------------------------------------------
/eth_vertigo/core/filters/exclude_filter.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from eth_vertigo.core.mutation import Mutation
3 | from eth_vertigo.core.filter import MutationFilter
4 | from random import sample
5 |
6 |
class ExcludeFilter(MutationFilter):
    """Filter that drops mutations located under any of the given path prefixes."""

    def __init__(self, prefixes: List[str]):
        """Create an exclusion filter.

        :param prefixes: Path prefixes (relative to the project root) to exclude
        """
        self.prefixes = prefixes
        super().__init__()

    def apply(self, mutations: List[Mutation]) -> List[Mutation]:
        """Return only the mutations whose file path is not excluded.

        :param mutations: The mutations to filter
        :return: The resulting list
        """
        kept = []
        for mutation in mutations:
            path = str(mutation.relative_path)
            excluded = any(
                path.startswith(prefix) or path.startswith("/" + prefix)
                for prefix in self.prefixes
            )
            if not excluded:
                kept.append(mutation)
        return kept
29 |
--------------------------------------------------------------------------------
/eth_vertigo/core/filters/sample_filter.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from eth_vertigo.core.mutation import Mutation
3 | from eth_vertigo.core.filter import MutationFilter
4 | from random import sample
5 |
6 |
class SampleFilter(MutationFilter):
    """Filter that keeps a random subset of the mutations, sized by a ratio."""

    def __init__(self, ratio: float = 0.1):
        """Create a sample filter.

        :param ratio: fraction (0.0 - 1.0) of the mutations to keep
        """
        self.ratio = ratio
        super().__init__()

    def apply(self, mutations: List[Mutation]) -> List[Mutation]:
        """Return a random sample of the given mutations.

        The sample size is truncated towards zero, so a small input combined
        with a small ratio can yield an empty selection.

        :param mutations: The mutations to filter
        :return: The resulting list
        """
        sample_size = int(self.ratio * len(mutations))
        return sample(mutations, sample_size)
25 |
--------------------------------------------------------------------------------
/eth_vertigo/core/mutation.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from eth_vertigo.mutator.source_file import SourceFile
3 | from typing import Tuple
4 | from enum import Enum
5 | from jinja2 import PackageLoader, Environment
6 |
# Jinja2 environment for the mutation triage template (core/templates/).
environment = Environment(
    loader=PackageLoader("eth_vertigo.core"), trim_blocks=True
)
10 |
class MutationResult(Enum):
    """ Possible verdicts for a single tested mutation """
    KILLED = 1      # at least one test failed on the mutant
    LIVED = 2       # the test suite passed on the mutant
    TIMEDOUT = 3    # the test run exceeded its timeout
    ERROR = 4       # the test run errored out (no verdict)
    EQUIVALENT = 5  # mutant compiled to bytecode identical to the original
17 |
18 |
# Human-readable labels for each MutationResult; the None key covers
# mutations that have not been tested yet.
_mutationresult_string = {
    MutationResult.KILLED: "Killed",
    MutationResult.LIVED: "Lived",
    MutationResult.TIMEDOUT: "Timeout",
    MutationResult.ERROR: "Error",
    MutationResult.EQUIVALENT: "Equivalent",
    None: "None"
}
27 |
28 |
class Mutation:
    """
    Holds the information describing a single mutation of a source file
    """

    def __init__(
            self,
            location: Tuple[int, int, int],
            source: SourceFile,
            value: str,
            project_directory: Path
    ):
        """
        Initializes a mutation
        :param location: Location of the mutation, in the src format (offset, length, file_index)
        :param source: Source file for which the mutation is to be applied
        :param value: New value that the location should take on
        :param project_directory: Path of the project directory that eth_vertigo is working in
        """
        self.location = location
        self.source = source
        self.project_directory = project_directory
        self.value = value

        # The following parameters track how and when this mutation was killed
        self.result = None
        self.crime_scenes = []

    @property
    def relative_path(self):
        """ Gets the path of the mutated file relative to the project directory """
        r_path = self.source.file.relative_to(self.project_directory)
        return str(r_path)

    @staticmethod
    def _get_mutated_line(offset, text):
        """ Returns (line_index, line_text) for the line containing offset

        Line indices are zero-based; tabs are stripped from the returned text
        for compact display. Robustness fix: previously an offset lying past
        the end of text returned None, which crashed callers on unpacking —
        now the last line is returned instead.
        """
        cursor = 0
        line_index, line = 0, ""
        for line_index, line in enumerate(text.splitlines(keepends=True)):
            if len(line) + cursor > offset:
                return line_index, line.replace("\t", "")
            cursor += len(line)
        return line_index, line.replace("\t", "")

    @property
    def source_file_name(self):
        """ File name (without directories) of the mutated source file """
        return self.source.file.name

    @property
    def original_value(self):
        """ The source text that this mutation replaces """
        source_content = self.source.file.read_text('utf-8')
        return source_content[self.location[0]: self.location[0] + self.location[1]]

    @property
    def line_number(self):
        """ Zero-based line number of the mutation in the source file """
        source_content = self.source.file.read_text('utf-8')
        line_nr, _ = self._get_mutated_line(self.location[0], source_content)
        return line_nr

    def __repr__(self) -> str:
        """ Renders triage information for this mutation """
        template = environment.get_template("mutation_template.jinja2")
        source_content = self.source.file.read_text('utf-8')
        line_nr, og_line = self._get_mutated_line(self.location[0], source_content)

        # Apply the mutation in-memory to display the resulting line
        mutated = source_content[:self.location[0]] \
                  + self.value + \
                  source_content[self.location[0] + self.location[1]:]

        _, mut_line = self._get_mutated_line(self.location[0], mutated)

        return template.render(
            file_name=str(self.source.file),
            line_number=line_nr,
            original_line=og_line,
            mutated_line=mut_line,
            result=_mutationresult_string[self.result]
        )
105 |
--------------------------------------------------------------------------------
/eth_vertigo/core/network/__init__.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.core.network.ganache import Ganache
2 |
3 | from eth_vertigo.core.network.pool import NetworkPool, StaticNetworkPool, DynamicNetworkPool
--------------------------------------------------------------------------------
/eth_vertigo/core/network/ganache.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, run, getoutput, PIPE
2 | from typing import Optional
3 | from tempfile import TemporaryFile
4 | from time import sleep
5 | from loguru import logger
6 | DEFAULT_GANACHE_PARAMETERS = [] # ["--dbMemdown"]
7 |
8 |
class Ganache:
    """ Manages the lifecycle of a single ganache (local test chain) process """

    def __init__(self, port, parameters, ganache_binary="ganache"):
        """
        :param port: Port for this ganache instance to listen on
        :param parameters: Extra CLI parameters (a --port option is appended)
        :param ganache_binary: Name or path of the ganache executable
        """
        # Remove any pre-set port options
        self.parameters = parameters
        self.parameters.extend(["--port", str(port)])

        for param in DEFAULT_GANACHE_PARAMETERS:
            if param in self.parameters:
                continue
            self.parameters.append(param)

        self.ganache_binary = ganache_binary
        self.process = None  # type: Optional[Popen]

    def start(self):
        """ Spawns the ganache process and blocks until it reports listening

        :raises ValueError: when this instance was already started
        :raises Exception: when the process exits before it starts listening
        """
        if self.process is not None:
            # Message fix: used to wrongly claim the process had "terminated"
            raise ValueError("Process has already been started")

        self.process = Popen(
            [self.ganache_binary] + self.parameters,
            stderr=PIPE, stdout=PIPE
        )

        while True:
            line = self.process.stdout.readline()
            if "Listening on" in str(line):
                break

            if self.process.poll() is not None:
                raise Exception("Could not create ganache network")

    def stop(self):
        """ Terminates the ganache process

        :raises ValueError: when never started or already terminated
        """
        if self.process is None:
            raise ValueError("Process has not yet been started")
        # Bug fix: poll() returns the exit code, which is falsy for a clean
        # exit (0), so the truthiness check missed cleanly exited processes.
        if self.process.poll() is not None:
            raise ValueError("Process has already terminated")
        self.process.terminate()
46 |
--------------------------------------------------------------------------------
/eth_vertigo/core/network/pool.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod, ABC
2 | from typing import Tuple, Callable, List, Dict
3 |
4 | import threading
5 |
class Network:
    """ A single test network (node) managed by a network pool """

    def __init__(self, name: str, port: int, provider=None):
        self.name = name          # identifier used for claim/yield bookkeeping
        self.port = port          # TCP port the node listens on
        self.provider = provider  # node process handle; set when claimed (see DynamicNetworkPool.claim)
11 |
12 |
class NetworkPool(ABC):
    """ Interface for pools that hand out test networks to campaign workers """

    @abstractmethod
    def claim(self) -> str:
        """ Reserve a network and return its name; implementations raise ValueError when none is free """
        pass

    @abstractmethod
    def yield_network(self, network: str):
        """ Return a previously claimed network (by name) to the pool """
        pass

    @property
    @abstractmethod
    def size(self):
        """ Total number of networks managed by the pool """
        pass
26 |
27 |
class StaticNetworkPool(NetworkPool):
    """ Pool over a fixed, externally managed set of networks

    Thread-safe: all bookkeeping happens under a single lock.
    """

    def __init__(self, networks: List[str]):
        """
        :param networks: Names of the networks available to this pool
        """
        self.available_networks = list(networks)
        self.claimed_networks = []
        self.lock = threading.Lock()

    def claim(self) -> str:
        """ Reserve a network

        :return: The name of the claimed network
        :raises ValueError: when no network is available
        """
        # `with` replaces the manual acquire/try/finally/release dance
        with self.lock:
            if not self.available_networks:
                raise ValueError("No network available")

            network = self.available_networks.pop()
            self.claimed_networks.append(network)
            return network

    def yield_network(self, network: str):
        """ Return a claimed network to the pool

        :raises ValueError: when the network was not claimed from this pool
        """
        with self.lock:
            if network not in self.claimed_networks:
                raise ValueError("Trying to yield unclaimed network")
            self.claimed_networks.remove(network)
            self.available_networks.append(network)

    @property
    def size(self):
        """ Total number of networks (available + claimed) """
        with self.lock:
            return len(self.available_networks) + len(self.claimed_networks)
63 |
64 |
class DynamicNetworkPool(NetworkPool):
    """ Pool that spins node processes up and down as networks are claimed

    Bookkeeping is guarded by a lock; the potentially slow provider start
    happens outside the lock, as before.
    """

    def __init__(self, networks: List[Tuple[str, int]], builder: Callable):
        """
        :param networks: (name, port) pairs describing the managed networks
        :param builder: Callable building a node provider for a given port
        """
        self.available_networks = {n[0]: Network(n[0], n[1]) for n in networks}  # type: Dict
        self.claimed_networks = {}
        self.builder = builder
        self.lock = threading.Lock()

    def claim(self) -> str:
        """ Reserve a network and start a node process for it

        :return: The name of the claimed network
        :raises ValueError: when no network is available
        """
        with self.lock:
            if not self.available_networks:
                raise ValueError("No network available")

            # Claim an arbitrary network from the available ones
            name = next(iter(self.available_networks))
            network = self.available_networks.pop(name)

            # Move it into the claimed networks
            self.claimed_networks[network.name] = network
            network.provider = self.builder(network.port)

        # Spin up the dynamic network outside the lock. The previous
        # `except ValueError: raise` wrapper was a no-op and was removed.
        # NOTE(review): if start() fails, the network stays claimed with a
        # dead provider and never returns to the pool — confirm intended.
        network.provider.start()
        return network.name

    def yield_network(self, network: str):
        """ Stop the node process of a claimed network and return it

        :raises ValueError: when the network was not claimed from this pool
        """
        with self.lock:
            if network not in self.claimed_networks:
                raise ValueError("Network not claimed")

            claimed = self.claimed_networks.pop(network)

            # Spin down the dynamic network. As before, a failing stop()
            # propagates and the network is not returned to the pool.
            claimed.provider.stop()
            self.available_networks[claimed.name] = claimed

    @property
    def size(self):
        """ Total number of networks (available + claimed) """
        with self.lock:
            return len(self.available_networks) + len(self.claimed_networks)
119 |
--------------------------------------------------------------------------------
/eth_vertigo/core/suggest.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Tuple, List
3 | from eth_vertigo.core.mutation import Mutation
4 |
5 |
class TestSuggester(ABC):
    """ TestSuggester interface definition

    A TestSuggester is used to provide suggestions for which tests to run to the mutation testing campaign.
    """

    @property
    @abstractmethod
    def is_strict(self) -> bool:
        """ Returns whether this suggester provides strict suggestions

        A strict suggestion is one where only the suggested tests should be run.

        A non-strict suggestion is one where the suggested tests should be run first, before executing
        the remainder of the test suite.
        """
        pass

    @abstractmethod
    def suggest_tests(self, mutation: Mutation) -> List:
        """ Request a list of tests from the suggester that should be run on the mutation

        :param mutation: The subject mutation

        :return: List of tests to run
        """
        pass
33 |
--------------------------------------------------------------------------------
/eth_vertigo/core/templates/mutation_template.jinja2:
--------------------------------------------------------------------------------
1 | Mutation:
2 | File: {{ file_name }}
3 | Line nr: {{ line_number }}
4 | Result: {{ result }}
5 | Original line:
6 | {{ original_line }}
7 | Mutated line:
8 | {{ mutated_line }}
9 |
--------------------------------------------------------------------------------
/eth_vertigo/core/templates/report_template.jinja2:
--------------------------------------------------------------------------------
1 | Mutation testing report:
2 | Number of mutations: {{ nr_mutations }}
3 | Killed: {{ nr_killed }} / {{ nr_mutations }}
4 |
5 | Mutations:
6 | {% if mutations and with_mutations %}
7 | {% for mutation in mutations %}
8 | {{ mutation }}
9 |
10 | {% endfor %}
11 | {% endif %}
--------------------------------------------------------------------------------
/eth_vertigo/incremental/__init__.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.incremental.record import IncrementalRecorder
2 | from eth_vertigo.incremental.store import IncrementalMutationStore
3 | from eth_vertigo.incremental.suggester import IncrementalSuggester
4 |
--------------------------------------------------------------------------------
/eth_vertigo/incremental/record.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 | from eth_vertigo.incremental.store import MutationRecord, IncrementalMutationStore
4 | from eth_vertigo.core import Mutation
5 |
6 |
class IncrementalRecorder:
    """ Builds an incremental mutation store from a finished campaign's mutations """

    def record(self, mutations: List[Mutation]) -> IncrementalMutationStore:
        """ Record all killed mutations (those with crime scenes) into a new store """
        store = IncrementalMutationStore()
        store.known_mutations = [
            self._mutation_to_record(mutation)
            for mutation in mutations
            if mutation.crime_scenes
        ]
        return store

    @staticmethod
    def _mutation_to_record(mutation: Mutation) -> MutationRecord:
        """ Translate a single Mutation into its serializable record form """
        record = MutationRecord()
        record.new_text = mutation.value
        record.original_text = mutation.original_value
        record.source_file_name = mutation.source_file_name
        record.location = ":".join(map(str, mutation.location))
        record.line_number = mutation.line_number
        record.crime_scenes = mutation.crime_scenes
        return record
28 |
--------------------------------------------------------------------------------
/eth_vertigo/incremental/store.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | from typing import List, Dict
3 | from pathlib import Path
4 |
5 |
class IncrementalMutationStore:
    """ (De)serializable collection of previously killed mutations """

    def __init__(self):
        self.known_mutations = []  # type: List[MutationRecord]

    @property
    def yaml(self):
        """ YAML representation of this store """
        return yaml.dump({"known_mutations": [m.as_dict for m in self.known_mutations]})

    @staticmethod
    def from_yaml(data):
        """ Parse a store from its YAML representation

        Robustness fix: yaml.safe_load returns None for an empty document,
        which previously crashed on the .get call below.
        """
        values = yaml.safe_load(data) or {}
        result = IncrementalMutationStore()
        result.known_mutations = [MutationRecord.from_dict(record) for record in values.get("known_mutations", [])]
        return result

    @staticmethod
    def from_file(file: Path):
        """ Parse a store from a YAML file

        :raises ValueError: when the path does not point to a file
        """
        if not file.is_file():
            raise ValueError("Passed path is not a file")

        content = file.read_text('utf-8')
        return IncrementalMutationStore.from_yaml(content)

    def to_file(self, file: Path):
        """ Write this store to a YAML file

        :raises ValueError: when the path exists and is not a file
        """
        if file.exists() and not file.is_file():
            raise ValueError("Passed path already exists and is not a file")

        file.write_text(self.yaml, "utf-8")
34 |
35 |
class MutationRecord:
    """ Serializable description of a single (killed) mutation """

    def __init__(self):
        self.location = None          # "offset:length:file_index" string
        self.original_text = None     # text replaced by the mutation
        self.line_number = None
        self.source_file_name = None
        self.new_text = None          # mutated replacement text
        self.crime_scenes = []        # type: List[str]

    @staticmethod
    def from_dict(data: Dict):
        """ Build a record from a plain dict, tolerating missing keys """
        result = MutationRecord()
        result.location = data.get("location", "")
        result.original_text = data.get("original_text", "")
        result.line_number = data.get("line_number", "")
        result.source_file_name = data.get("source_file_name", "")
        result.new_text = data.get("new_text", "")
        result.crime_scenes = data.get("crime_scenes", [])
        return result

    @property
    def as_dict(self):
        """ Dict form of this record, suitable for YAML serialization

        Fix: returns a shallow copy so callers can no longer mutate the
        record through the returned dict (the live __dict__ used to leak).
        """
        return dict(self.__dict__)
59 |
60 |
--------------------------------------------------------------------------------
/eth_vertigo/incremental/suggester.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.core import Mutation, TestSuggester
2 |
3 | from typing import List, Tuple
4 | from eth_vertigo.incremental.store import IncrementalMutationStore, MutationRecord
5 |
6 |
class IncrementalSuggester(TestSuggester):
    """ Suggests the tests that killed equivalent mutations in earlier runs """

    def __init__(self, store: IncrementalMutationStore):
        self._store = store

    @property
    def is_strict(self) -> bool:
        """ Incremental suggestions are advisory, never strict """
        return False

    def _equivalent(self, mutation: Mutation, record: MutationRecord):
        """ Check whether a stored record describes the given mutation """
        return (
            record.source_file_name == mutation.source_file_name
            and record.original_text == mutation.original_value
            and record.new_text == mutation.value
            and record.line_number == mutation.line_number
        )

    def suggest_tests(self, mutation: Mutation) -> List:
        """ Collect the deduplicated crime scenes of all matching records """
        matching = (
            record for record in self._store.known_mutations
            if self._equivalent(mutation, record)
        )
        collected = set()
        for record in matching:
            collected.update(record.crime_scenes)
        return list(collected)
28 |
29 |
30 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/eth_vertigo/interfaces/__init__.py
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/common/__init__.py:
--------------------------------------------------------------------------------
1 | import re
2 | from typing import Dict
3 | from eth_vertigo.test_runner.test_result import TestResult
4 |
# Matches the metadata (swarm hash) trailer that the Solidity compiler
# appends to the end of deployed bytecode.
swarm_hash_regex = re.compile("((a165)(.*)(5820)[a-f0-9]{64}(0029))$")

def strip_metadata(bytecode: str) -> str:
    """ Return the bytecode with any trailing metadata section removed """
    stripped = swarm_hash_regex.sub("", bytecode)
    return stripped
9 |
10 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/common/tester.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 | from pathlib import Path
4 | from tempfile import mkdtemp
5 | from distutils.dir_util import copy_tree
6 | import shutil
7 |
8 | from eth_vertigo.core import Mutation
9 | from eth_vertigo.test_runner.file_editor import FileEditor
10 | from eth_vertigo.interfaces.generics import Tester
11 | from json import loads, JSONDecodeError
12 | from subprocess import Popen, TimeoutExpired
13 | from tempfile import TemporaryFile
14 | from typing import Dict, Union
15 | from eth_vertigo.test_runner.exceptions import TestRunException, TimedOut
16 | from eth_vertigo.test_runner import TestResult
17 |
18 | from eth_vertigo.test_runner.exceptions import EquivalentMutant
19 |
20 | from typing import List, Dict, Union
21 |
22 |
def normalize_mocha(mocha_json: dict) -> Dict[str, TestResult]:
    """ Flatten a mocha JSON report into a mapping of full test title to TestResult """
    results = {}
    for section, passed in (("failures", False), ("passes", True)):
        for entry in mocha_json[section]:
            results[entry["fullTitle"]] = TestResult(
                entry["title"], entry["fullTitle"], entry["duration"], passed
            )
    return results
30 |
31 |
def make_temp_directory(original_dir: str):
    """ Copy the project into a fresh temporary directory and return its path

    Fix: uses shutil.copytree instead of distutils.dir_util.copy_tree, which
    is deprecated and removed in Python 3.12. symlinks=True preserves
    symlinks, matching the previous preserve_symlinks=1 behavior, and
    dirs_exist_ok allows copying into the pre-created temp directory.
    """
    td = mkdtemp()
    shutil.copytree(original_dir, td, symlinks=True, dirs_exist_ok=True)
    return td
36 |
37 |
def clean_build_directory(project_path: str, build_directory: str = "build"):
    """ Remove a project's build-artifact directory, if present """
    build_dir = Path(project_path) / build_directory
    if not build_dir.is_dir():
        return
    shutil.rmtree(build_dir)
42 |
43 |
def rm_temp_directory(temp_dir: str):
    """ Recursively delete a temporary working copy of the project """
    shutil.rmtree(Path(temp_dir))
46 |
47 |
def apply_mutation(mutation: Mutation, working_directory):
    """ Apply a single mutation to its target file inside working_directory """
    target_file = f"{working_directory}/{mutation.relative_path}"
    FileEditor.edit(target_file, mutation.location, mutation.value)
51 |
52 |
class MochaStdoutTester(Tester):
    """ Tester that runs a mocha-reporting test command and parses its stdout

    Subclasses supply the framework-specific configuration instrumentation
    and the concrete test command.
    """

    def run_tests(
        self,
        coverage: bool = False,
        mutation: Mutation = None,
        timeout=None,
        network: str = None,
        original_bytecode: Dict[str, str] = None,
        keep_test_names: List[str] = None,
    ) -> dict:
        """
        Runs all the tests in the truffle project in a clean environment
        :param coverage: Whether to run the tests with coverage
        :param mutation: Mutation to apply to the project files before testing
        :param timeout: Maximum duration that the test is allowed to take
        :param network: Name of the network that the test should be using
        :param original_bytecode: A dict of the original bytecodes (before mutation)
        :param keep_test_names: Only execute the tests in this list
        :return: Test results
        :raises EquivalentMutant: if the mutant's bytecode matches the original
        :raises TimedOut: if the test command exceeds the timeout
        :raises TestRunException: if the test output cannot be interpreted
        """
        if coverage:
            raise NotImplementedError

        # Work on a disposable copy of the project so that the original tree
        # is never modified by mutations or build artifacts
        temp_dir = make_temp_directory(self.project_directory)
        clean_build_directory(temp_dir)

        self.instrument_configuration(temp_dir, keep_test_names)

        if mutation:
            apply_mutation(mutation, temp_dir)
        try:
            # Trivial compiler equivalence: identical bytecode means no test
            # can possibly kill this mutant
            if original_bytecode is not None and original_bytecode != {}:
                if self.compiler.check_bytecodes(temp_dir, original_bytecode):
                    raise EquivalentMutant
            test_command = self.build_test_command(network)
            result = self.run_test_command(test_command, temp_dir, timeout=timeout)
        finally:
            rm_temp_directory(temp_dir)

        return result

    @abstractmethod
    def instrument_configuration(self, directory, keep_test_names):
        """ Prepare the copied project's configuration for this test run """
        pass

    @abstractmethod
    def build_test_command(self, network: str):
        """ Build the command that runs the test suite against `network` """
        pass

    @staticmethod
    def run_test_command(
        command: str,
        working_directory: str,
        timeout=None
    ) -> Union[Dict[str, TestResult], None]:
        """ Execute the test command and parse its mocha JSON stdout

        :param command: Command passed to Popen
        :param working_directory: Directory to run the command in
        :param timeout: Seconds to wait before killing the process
        :raises TimedOut: when the process exceeds the timeout
        :raises TestRunException: on reported errors or unparsable output
        """
        with TemporaryFile() as stdin, TemporaryFile() as stdout:
            stdin.seek(0)
            proc = Popen(command, stdin=stdin, stdout=stdout, stderr=stdout, cwd=working_directory)
            try:
                proc.wait(timeout=timeout)
            except TimeoutExpired:
                proc.kill()
                raise TimedOut

            stdout.seek(0)
            output = stdout.read()

            # The JSON report starts at the first line opening with "{";
            # everything before that is framework preamble that is skipped.
            # Lines starting with "Error" are collected separately.
            split = output.decode('utf-8').split("\n")
            errors = []
            test_result = []
            preamble = True
            for line in split:
                if line.startswith("Error"):
                    errors.append(line)
                if line.startswith("{"):
                    preamble = False
                if preamble:
                    continue
                test_result.append(line)

            test_result = "\n".join(test_result)

            if errors:
                raise TestRunException("\n".join(errors))
            try:
                return normalize_mocha(loads(test_result))
            except JSONDecodeError:
                raise TestRunException("Encountered error during test output analysis")
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/generics.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Union, List
2 | from eth_vertigo.test_runner.test_result import TestResult
3 | from abc import ABC, abstractmethod
4 | from eth_vertigo.core import Mutation
5 |
6 |
class Compiler:
    """Compiler test framework interface"""

    def run_compilation(self, working_directory: str) -> None:
        """ Executes compilation

        :param working_directory: The project directory to compile
        """
        raise NotImplementedError

    def get_bytecodes(self, working_directory: str) -> Dict[str, str]:
        """ Returns the bytecodes in the compilation result of the current directory

        :param working_directory: The project directory for which we retrieve the bytecodes
        :return: bytecodes in the shape {'contractName': '0x00'}
        """
        raise NotImplementedError

    def check_bytecodes(self, working_directory: str, original_bytecode: Dict[str, str]) -> bool:
        """ Checks whether compiling the working directory reproduces the original bytecodes

        Note: the previous docstring ("whether any of the bytecodes differ")
        described the inverse of the actual return value.

        :param working_directory: The project directory whose bytecodes should be checked
        :param original_bytecode: The original bytecodes {'contractName': '0x00'}
        :return: True when every compiled bytecode equals its original counterpart
        :raises KeyError: when a compiled contract is absent from original_bytecode
        """
        current_bytecodes = self.get_bytecodes(working_directory)
        # all() keeps the original semantics, including the KeyError for
        # contracts that do not appear in original_bytecode
        return all(
            original_bytecode[contract_name] == bytecode
            for contract_name, bytecode in current_bytecodes.items()
        )
36 |
37 |
class Tester(ABC):
    """Tester interface exposes testing functionality from testing frame work"""

    @abstractmethod
    def run_tests(
        self,
        coverage: bool = False,
        mutation: Mutation = None,
        timeout=None,
        network: str = None,
        original_bytecode: Dict[str, str] = None,
        keep_test_names: List[str] = None,
    ) -> Union[None, Dict[str, TestResult]]:
        """Run the project's test suite.

        :param coverage: whether to run the tests with coverage
        :param mutation: mutation to apply to the sources before running
        :param timeout: maximum duration the test run is allowed to take
        :param network: name of the network the tests should use
        :param original_bytecode: pre-mutation bytecodes ({'contractName': '0x00'})
        :param keep_test_names: when set, only execute the tests in this list
        :return: mapping of test name to TestResult, or None
        """
        pass
52 |
53 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/hardhat/__init__.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.core.campaign import BaseCampaign
2 | from typing import List
3 | from eth_vertigo.mutator.mutator import Mutator
4 | from pathlib import Path
5 | from eth_vertigo.core.network import NetworkPool
6 | from json import loads
7 |
8 |
class HardhatCampaign(BaseCampaign):
    """BaseCampaign specialisation that wires up the hardhat toolchain."""

    def __init__(
        self,
        hardhat_command: List[str],
        project_directory: Path,
        mutators: List[Mutator],
        network_pool: NetworkPool,
        filters=None,
        suggesters=None
    ):
        """
        :param hardhat_command: argv prefix used to invoke hardhat
        :param project_directory: root directory of the hardhat project
        :param mutators: mutators used to generate candidate mutations
        :param network_pool: pool of networks to run tests against
        :param filters: optional mutation filters
        :param suggesters: optional mutation suggesters
        """
        # NOTE(review): imported inside __init__, presumably to avoid an import
        # cycle at module load time -- confirm before moving to module level
        from eth_vertigo.interfaces.hardhat.tester import HardhatTester
        from eth_vertigo.interfaces.hardhat.compile import HardhatCompiler
        from eth_vertigo.interfaces.hardhat.mutator import HardhatSourceFile

        compiler = HardhatCompiler(hardhat_command)
        tester = HardhatTester(hardhat_command, str(project_directory), compiler)
        source_file_builder = lambda ast, full_path: HardhatSourceFile(ast, full_path)

        super().__init__(
            project_directory=project_directory,
            mutators=mutators,
            network_pool=network_pool,

            compiler=compiler,
            tester=tester,
            source_file_builder=source_file_builder,

            filters=filters,
            suggesters=suggesters
        )

    def _get_sources(self):
        """ Implements basic mutator file discovery """
        contracts_dir = self.project_directory / "artifacts" / "contracts"
        # No build artifacts yet: compile first so the ASTs exist
        if not contracts_dir.exists():
            self.compiler.run_compilation(str(self.project_directory))

        contract_directories = []
        def explore_contracts(directory: Path):
            # Recursively collect every "<name>.sol" artifact directory
            for item in directory.iterdir():
                if item.name.endswith(".sol"):
                    contract_directories.append(item)
                elif item.is_dir():
                    explore_contracts(item)

        explore_contracts(contracts_dir)

        for contract_dir in contract_directories:
            # Each contract artifact "X.json" is paired with a "X.dbg.json"
            # pointing at the build-info file that holds the AST
            for contract in [c for c in contract_dir.iterdir() if "dbg.json" not in c.name]:

                dbg_json = contract_dir / contract.name.replace('.json', '.dbg.json')

                contract = loads(contract.read_text("utf-8"))
                dbg = loads(dbg_json.read_text("utf-8"))

                source_name = contract["sourceName"]
                build_info_file = contract_dir / dbg["buildInfo"]
                build_info = loads(build_info_file.read_text("utf-8"))

                ast = build_info["output"]["sources"][source_name]["ast"]
                absolute_path = self.project_directory / ast["absolutePath"]

                yield self.source_file_builder(ast, absolute_path)
72 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/hardhat/compile.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.interfaces.hardhat.core import HardhatCore
2 | from eth_vertigo.interfaces.common import strip_metadata
3 | from eth_vertigo.interfaces.generics import Compiler
4 | from typing import Dict
5 |
6 | from subprocess import Popen, TimeoutExpired
7 | from tempfile import TemporaryFile
8 | from pathlib import Path
9 |
10 | from loguru import logger
11 | import json
12 |
13 |
class HardhatCompiler(Compiler, HardhatCore):
    """Compiler interface implementation driving `hardhat compile`."""

    def run_compilation(self, working_directory: str) -> None:
        """ Executes compilation

        :param working_directory: The hardhat project directory to compile
        :raises Exception: when the compiler output contains error lines
        """
        with TemporaryFile() as stdin, TemporaryFile() as stdout, TemporaryFile() as stderr:
            stdin.seek(0)
            proc = Popen(
                self.hardhat_command + ['compile'],
                stdin=stdin,
                stdout=stdout,
                stderr=stderr,
                cwd=working_directory
            )
            proc.wait()
            stdout.seek(0)
            stderr.seek(0)
            # Bug fix: stderr was captured but never inspected, so compiler
            # errors reported on stderr went undetected.
            output = stdout.read() + stderr.read()

        split = output.decode('utf-8').split("\n")

        errors = []
        for line in split:
            if line.startswith("Error"):
                errors.append(line)

        if errors:
            raise Exception("Encountered compilation error: \n" + "\n".join(errors))

    def get_bytecodes(self, working_directory: str) -> Dict[str, str]:
        """ Returns the bytecodes in the compilation result of the given directory

        :param working_directory: The hardhat project for which we retrieve the bytecodes
        :return: bytecodes in the shape {'contractName': '0x00'}
        """
        w_dir = Path(working_directory)
        self.run_compilation(working_directory)

        if not (w_dir / "artifacts").is_dir():
            logger.error("Compilation did not create build directory")

        contracts_dir = w_dir / "artifacts" / "contracts"
        if not contracts_dir.is_dir():
            logger.error("No contracts directory found in build directory")

        contract_directories = []

        def explore_contracts(directory: Path):
            # Recursively collect every "<name>.sol" artifact directory
            for item in directory.iterdir():
                if item.name.endswith(".sol"):
                    contract_directories.append(item)
                elif item.is_dir():
                    explore_contracts(item)

        explore_contracts(contracts_dir)

        current_bytecode = {}

        for contract_dir in contract_directories:
            # Skip the "*.dbg.json" companion files; they hold no bytecode
            for contract in [c for c in contract_dir.iterdir() if "dbg.json" not in c.name]:
                try:
                    contract_compilation_result = json.loads(contract.read_text('utf-8'))
                except json.JSONDecodeError:
                    logger.warning(f"Could not read compilation result for {contract.name}")
                    continue

                # Strip the trailing metadata so equal code compares equal even
                # when source paths/hashes differ
                current_bytecode[contract_compilation_result["contractName"]] = \
                    strip_metadata(contract_compilation_result["bytecode"])

        return current_bytecode
75 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/hardhat/core.py:
--------------------------------------------------------------------------------
class HardhatCore:
    """Holds the command-line invocation used to drive hardhat."""

    def __init__(self, hardhat_command):
        # argv prefix that launches the hardhat CLI
        self.hardhat_command = hardhat_command
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/hardhat/mutator.py:
--------------------------------------------------------------------------------
1 | from jsonpath_rw import parse
2 | from json import loads
3 | from pathlib import Path
4 | from eth_vertigo.mutator.source_file import SourceFile
5 | from typing import Dict
6 |
7 | def _get_src(src_str: str):
8 | return [int(e) for e in src_str.split(":")]
9 |
10 |
11 | def _get_binaryop_info(node: dict):
12 | """
13 | Gets info on the binary operation from an ast node
14 | This ast node must be referencing an binary operation
15 |
16 | :param node: ast node to look for
17 | :return: the operator, src for the operator
18 | """
19 | if node["nodeType"] != "BinaryOperation":
20 | raise ValueError("Passed node is not a binary operation")
21 |
22 | c_src = _get_src(node["src"])
23 |
24 | original_operator = node["operator"]
25 | op0_src = _get_src(node["leftExpression"]["src"])
26 | op1_src = _get_src(node["rightExpression"]["src"])
27 |
28 | if not (c_src[2] == op0_src[2] == op1_src[2]):
29 | raise ValueError("src fields are inconsistent")
30 |
31 | start = op0_src[0] + op0_src[1]
32 | length = op1_src[0] - start
33 | op_src = (start, length, c_src[2])
34 |
35 | return original_operator, op_src
36 |
37 |
38 | def _get_op_info(node: dict):
39 | c_src = _get_src(node["src"])
40 |
41 | original_operator = node["operator"]
42 | op0_src = _get_src(node["leftHandSide"]["src"])
43 | op1_src = _get_src(node["rightHandSide"]["src"])
44 |
45 | if not (c_src[2] == op0_src[2] == op1_src[2]):
46 | raise ValueError("src fields are inconsistent")
47 |
48 | start = op0_src[0] + op0_src[1]
49 | length = op1_src[0] - start
50 | op_src = (start, length, c_src[2])
51 |
52 | return original_operator, op_src
53 |
54 |
class HardhatSourceFile(SourceFile):
    """SourceFile backed by a compact ("nodeType") solc AST from hardhat build info."""

    def __init__(self, ast: Dict, file: Path):
        # The AST comes from the hardhat build-info output for `file`
        self.ast = ast
        super().__init__(file)

    def get_binary_op_locations(self):
        """Yield (operator, src) for every BinaryOperation node."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "BinaryOperation":
                continue
            yield _get_binaryop_info(match.value)

    def get_if_statement_binary_ops(self):
        """Yield (operator, src) for the condition of every if statement."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "IfStatement":
                continue
            # Bug fix: compact-AST IfStatement nodes store their condition under
            # the "condition" key; "children" exists only in the legacy AST
            # format, so the previous lookup raised KeyError on every match.
            condition = match.value["condition"]
            yield _get_binaryop_info(condition)

    def get_assignments(self):
        """Yield (operator, src) for every Assignment node."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "Assignment":
                continue
            yield _get_op_info(match.value)

    def get_void_calls(self):
        """Yield (None, src) for calls to functions that return nothing."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "FunctionCall":
                continue
            function_identifier = match.value["expression"]

            function_typedef = function_identifier["typeDescriptions"]["typeString"]
            if "returns" in function_typedef:
                continue
            if "function" not in function_typedef:
                continue
            # Event emissions look like calls but must not be mutated away
            if function_identifier["typeDescriptions"]["typeIdentifier"].startswith("t_function_event"):
                continue

            try:
                # Never mutate require() calls; unnamed callees are skipped too
                if "require" in function_identifier["name"]:
                    continue
            except KeyError:
                continue
            yield (None, _get_src(match.value["src"]))

    def get_modifier_invocations(self):
        """Yield (None, src) for every ModifierInvocation node."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "ModifierInvocation":
                continue
            yield (None, _get_src(match.value["src"]))
110 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/hardhat/tester.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from eth_vertigo.interfaces.hardhat.core import HardhatCore
3 | from eth_vertigo.interfaces.common.tester import MochaStdoutTester
4 | from eth_vertigo.interfaces.generics import Tester, Compiler
5 |
6 | from typing import Optional, List
7 | from pathlib import Path
8 |
9 |
10 | def _set_reporter(directory: str):
11 | config = Path(directory) / "hardhat.config.js"
12 | content = config.read_text("utf-8")
13 | content += "\nmodule.exports.mocha = {reporter: \"json\"};\n"
14 | content += "\nmodule.exports.solc = {optimizer: { enabled: true, runs: 200 }};\n"
15 | config.write_text(content, "utf-8")
16 |
17 |
18 | def _set_include_tests(directory: str, test_names: List[str]):
19 | config = Path(directory) / "hardhat.config.js"
20 |
21 | content = config.read_text("utf-8")
22 |
23 | test_regex = "({})".format("|".join(test_names))
24 | content += "\nmodule.exports.mocha.grep= \"" + test_regex + "\";\n"
25 | config.write_text(content, "utf-8")
26 |
27 |
class HardhatTester(HardhatCore, MochaStdoutTester):
    """MochaStdoutTester implementation driving hardhat's test runner."""

    def __init__(self, hardhat_command: List[str], project_directory, compiler: Compiler):
        """
        :param hardhat_command: argv prefix used to invoke hardhat
        :param project_directory: root of the hardhat project under test
        :param compiler: compiler used for equivalent-mutant checks
        """
        self.project_directory = project_directory
        self.compiler = compiler
        HardhatCore.__init__(self, hardhat_command)

    def instrument_configuration(self, directory, keep_test_names: Optional[List[str]]):
        """Install the JSON reporter and, when requested, a test-name filter."""
        _set_reporter(directory)
        if keep_test_names:
            # Bug fix: this previously passed `self.directory`, an attribute
            # that is never assigned anywhere (AttributeError whenever a test
            # filter was requested); the instrumented temp copy is `directory`.
            _set_include_tests(directory, keep_test_names)

    def build_test_command(self, network: Optional[str]) -> List[str]:
        """Return the hardhat test invocation (network selection intentionally disabled)."""
        result = self.hardhat_command + ['test']
        # if network:
        #     result.extend(['--network', network])
        return result
44 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/truffle/__init__.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.core.campaign import BaseCampaign
2 | from typing import List
3 | from eth_vertigo.mutator.mutator import Mutator
4 | from pathlib import Path
5 | from eth_vertigo.core.network import NetworkPool
6 |
7 |
class TruffleCampaign(BaseCampaign):
    """BaseCampaign specialisation that wires up the truffle toolchain."""

    def __init__(
        self,
        truffle_location: str,
        project_directory: Path,
        mutators: List[Mutator],
        network_pool: NetworkPool,
        filters=None,
        suggesters=None
    ):
        """
        :param truffle_location: location of the truffle executable
        :param project_directory: root directory of the truffle project
        :param mutators: mutators used to generate candidate mutations
        :param network_pool: pool of networks to run tests against
        :param filters: optional mutation filters
        :param suggesters: optional mutation suggesters
        """
        # NOTE(review): imported inside __init__, presumably to avoid an import
        # cycle at module load time -- confirm before moving to module level
        from eth_vertigo.interfaces.truffle.tester import TruffleTester
        from eth_vertigo.interfaces.truffle.compiler import TruffleCompiler
        from eth_vertigo.interfaces.truffle.mutator import SolidityFile

        compiler = TruffleCompiler(truffle_location)
        tester = TruffleTester(truffle_location, str(project_directory), compiler)
        source_file_builder = lambda path: SolidityFile(path)

        super().__init__(
            project_directory=project_directory,
            mutators=mutators,
            network_pool=network_pool,

            compiler=compiler,
            tester=tester,
            source_file_builder=source_file_builder,

            filters=filters,
            suggesters=suggesters
        )

    def _get_sources(self, dir=None):
        """ Implements basic mutator file discovery

        :param dir: directory holding truffle build artifacts; defaults to
            self.source_directory (presumably set by BaseCampaign -- confirm).
            NOTE(review): the name shadows the `dir` builtin.
        """
        # Compile first when no build artifacts exist yet
        if not (self.project_directory / "build").exists():
            self.compiler.run_compilation(str(self.project_directory))

        dir = dir or self.source_directory
        for source_file in dir.iterdir():
            # Migrations.json is truffle bookkeeping, not a project contract
            if source_file.name == "Migrations.json":
                continue
            if not source_file.name.endswith(".json"):
                continue
            yield self.source_file_builder(source_file)
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/truffle/compiler.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.interfaces.generics import Compiler, Tester
2 | from eth_vertigo.interfaces.truffle.core import TruffleCore
3 | from eth_vertigo.interfaces.common import strip_metadata
4 | from eth_vertigo.test_runner.exceptions import TestRunException, TimedOut
5 | from eth_vertigo.test_runner import TestResult
6 | from json import loads, JSONDecodeError
7 | from subprocess import Popen, TimeoutExpired
8 | from tempfile import TemporaryFile
9 | from pathlib import Path
10 | from typing import Dict, Union
11 | from loguru import logger
12 | import json
13 | import re
14 |
15 |
class TruffleCompiler(TruffleCore, Compiler):
    """Compiler interface implementation driving `truffle compile`."""

    def run_compilation(self, working_directory: str) -> None:
        """ Executes compilation

        :param working_directory: The truffle project directory to compile
        :raises Exception: when the compiler output contains error lines
        """
        command = [
            self.truffle_location, 'compile'
        ]

        with TemporaryFile() as stdin, TemporaryFile() as stdout, TemporaryFile() as stderr:
            stdin.seek(0)
            # Bug fix: stderr previously went uncaptured, so compile errors
            # that truffle prints on stderr were never scanned for "Error" lines.
            proc = Popen(command, stdin=stdin, stdout=stdout, stderr=stderr, cwd=working_directory)
            proc.wait()
            stdout.seek(0)
            stderr.seek(0)
            output = stdout.read() + stderr.read()

        split = output.decode('utf-8').split("\n")

        errors = []
        for line in split:
            if line.startswith("Error"):
                errors.append(line)

        if errors:
            raise Exception("Encountered compilation error: \n" + "\n".join(errors))

    def get_bytecodes(self, working_directory: str) -> Dict[str, str]:
        """ Returns the bytecodes in the compilation result of the current directory

        :param working_directory: The truffle directory for which we retreive the bytecodes
        :return: bytecodes in the shape {'contractName': '0x00'}
        """
        w_dir = Path(working_directory)
        self.run_compilation(working_directory)
        if not (w_dir / "build").is_dir():
            logger.error("Compilation did not create build directory")

        contracts_dir = w_dir / "build" / "contracts"
        if not contracts_dir.is_dir():
            logger.error("No contracts directory found in build directory")

        current_bytecode = {}

        for contract in contracts_dir.iterdir():
            try:
                contract_compilation_result = json.loads(contract.read_text('utf-8'))
            except json.JSONDecodeError:
                logger.warning(f"Could not read compilation result for {contract.name}")
                continue

            # Strip trailing metadata so equal code compares equal even when
            # source paths/hashes differ
            current_bytecode[contract_compilation_result["contractName"]] = \
                strip_metadata(contract_compilation_result["bytecode"])
        return current_bytecode
66 |
67 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/truffle/core.py:
--------------------------------------------------------------------------------
class TruffleCore:
    """Thin wrapper around the truffle command-line interface."""

    def __init__(self, truffle_location: str = "truffle") -> None:
        """Create a truffle interface object.

        :param truffle_location: path or name of the truffle executable
        """
        # Defaults to "truffle" resolved from PATH
        self.truffle_location = truffle_location
10 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/truffle/mutator.py:
--------------------------------------------------------------------------------
1 | from jsonpath_rw import parse
2 | from json import loads
3 | from pathlib import Path
4 | from eth_vertigo.mutator.source_file import SourceFile
5 |
6 |
7 | def _get_ast(json_file):
8 | return loads(json_file.read_text("utf-8"))
9 |
10 |
11 | def _get_src(src_str: str):
12 | return [int(e) for e in src_str.split(":")]
13 |
14 |
15 | def _get_binaryop_info(node: dict):
16 | """
17 | Gets info on the binary operation from an ast node
18 | This ast node must be referencing an binary operation
19 |
20 | :param node: ast node to look for
21 | :return: the operator, src for the operator
22 | """
23 | if node["nodeType"] != "BinaryOperation":
24 | raise ValueError("Passed node is not a binary operation")
25 |
26 | c_src = _get_src(node["src"])
27 |
28 | original_operator = node["operator"]
29 | op0_src = _get_src(node["leftExpression"]["src"])
30 | op1_src = _get_src(node["rightExpression"]["src"])
31 |
32 | if not (c_src[2] == op0_src[2] == op1_src[2]):
33 | raise ValueError("src fields are inconsistent")
34 |
35 | start = op0_src[0] + op0_src[1]
36 | length = op1_src[0] - start
37 | op_src = (start, length, c_src[2])
38 |
39 | return original_operator, op_src
40 |
41 |
42 | def _get_op_info(node: dict):
43 | c_src = _get_src(node["src"])
44 |
45 | original_operator = node["operator"]
46 | op0_src = _get_src(node["leftHandSide"]["src"])
47 | op1_src = _get_src(node["rightHandSide"]["src"])
48 |
49 | if not (c_src[2] == op0_src[2] == op1_src[2]):
50 | raise ValueError("src fields are inconsistent")
51 |
52 | start = op0_src[0] + op0_src[1]
53 | length = op1_src[0] - start
54 | op_src = (start, length, c_src[2])
55 |
56 | return original_operator, op_src
57 |
58 |
class SolidityFile(SourceFile):
    """SourceFile backed by the compact ("nodeType") AST inside a truffle build artifact."""

    def __init__(self, json_path: Path):
        self.json = _get_ast(json_path)
        self.ast = self.json["ast"]
        # The artifact records the original solidity source location
        file = Path(self.json["sourcePath"])
        super().__init__(file)

    def get_binary_op_locations(self):
        """Yield (operator, src) for every BinaryOperation node."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "BinaryOperation":
                continue
            yield _get_binaryop_info(match.value)

    def get_if_statement_binary_ops(self):
        """Yield (operator, src) for the condition of every if statement."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "IfStatement":
                continue
            # Bug fix: compact-AST IfStatement nodes store their condition under
            # the "condition" key; "children" exists only in the legacy AST
            # format, so the previous lookup raised KeyError on every match.
            condition = match.value["condition"]
            yield _get_binaryop_info(condition)

    def get_assignments(self):
        """Yield (operator, src) for every Assignment node."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "Assignment":
                continue
            yield _get_op_info(match.value)

    def get_void_calls(self):
        """Yield (None, src) for calls to functions that return nothing."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "FunctionCall":
                continue
            function_identifier = match.value["expression"]

            function_typedef = function_identifier["typeDescriptions"]["typeString"]
            if "returns" in function_typedef:
                continue
            if "function" not in function_typedef:
                continue
            # Event emissions look like calls but must not be mutated away
            if function_identifier["typeDescriptions"]["typeIdentifier"].startswith("t_function_event"):
                continue

            try:
                # Never mutate require() calls; unnamed callees are skipped too
                if "require" in function_identifier["name"]:
                    continue
            except KeyError:
                continue
            yield (None, _get_src(match.value["src"]))

    def get_modifier_invocations(self):
        """Yield (None, src) for every ModifierInvocation node."""
        path_expr = parse('*..nodeType.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["nodeType"] != "ModifierInvocation":
                continue
            yield (None, _get_src(match.value["src"]))
116 |
--------------------------------------------------------------------------------
/eth_vertigo/interfaces/truffle/tester.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import List, Optional
3 |
4 | from eth_vertigo.interfaces.common.tester import MochaStdoutTester
5 | from eth_vertigo.interfaces.generics import Compiler
6 | from eth_vertigo.interfaces.truffle.core import TruffleCore
7 |
8 |
9 | def _set_reporter(directory: str):
10 | config = Path(directory) / "truffle.js"
11 | if not config.is_file():
12 | config = Path(directory) / "truffle-config.js"
13 | content = config.read_text("utf-8")
14 | content += "\nmodule.exports.mocha = {reporter: \"json\"};\n"
15 | content += "\nmodule.exports.solc = {optimizer: { enabled: true, runs: 200}};\n"
16 | config.write_text(content, "utf-8")
17 |
18 |
19 | def _set_include_tests(directory: str, test_names: List[str]):
20 | config = Path(directory) / "truffle.js"
21 | if not config.is_file():
22 | config = Path(directory) / "truffle-config.js"
23 |
24 | content = config.read_text("utf-8")
25 |
26 | test_regex = "({})".format("|".join(test_names))
27 | content += "\nmodule.exports.mocha.grep= \"" + test_regex + "\";\n"
28 | config.write_text(content, "utf-8")
29 |
30 |
class TruffleTester(TruffleCore, MochaStdoutTester):
    """MochaStdoutTester implementation backed by the truffle CLI."""

    def __init__(self, truffle_location, project_directory, compiler: Compiler):
        # Keep the project root and compiler around for equivalence checks
        self.project_directory = project_directory
        self.compiler = compiler
        TruffleCore.__init__(self, truffle_location)

    def instrument_configuration(self, working_directory, keep_test_names: Optional[List[str]]):
        """Install the JSON reporter and, when requested, a test-name filter."""
        _set_reporter(working_directory)
        if not keep_test_names:
            return
        _set_include_tests(working_directory, keep_test_names)

    def build_test_command(self, network: Optional[str]) -> List[str]:
        """Assemble the `truffle test` invocation, optionally pinning a network."""
        command = [self.truffle_location, 'test']
        if network:
            command += ['--network', network]
        return command
47 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/eth_vertigo/mutator/__init__.py
--------------------------------------------------------------------------------
/eth_vertigo/mutator/mutation_configuration.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
class MutationStrategy(Enum):
    """
    Mutation strategy enum that signifies the different strategies that are available
    """
    BoundaryConditionals = 1  # relational-operator boundary changes (e.g. "<" -> "<=")
    NegatedConditionals = 2   # relational-operator negation (e.g. "==" -> "!=")
    MathOperations = 3        # arithmetic/bitwise operator swaps (e.g. "+" -> "-")
    MutateIncrements = 4      # compound-assignment swaps ("+=" <-> "-=")
    MutateVoidCalls = 5       # mutation of calls to functions that return nothing
13 |
14 |
class MutationConfiguration:
    """This class is used to convey information to the Mutator instances

    Selecting core profiles:
    This configuration object lets you configure a Mutator instance to only execute specific mutations strategies
    and leave others
    """

    def __init__(self, enabled_strategies=None):
        """ Initialize core configuration object

        :param enabled_strategies: strategies to enable; None/empty means "no restriction"
        """
        self.enabled_strategies = enabled_strategies or []

    @property
    def all_enabled(self):
        """ Returns whether all strategies are enabled at this moment"""
        # An empty selection means "no restriction", i.e. everything enabled.
        # Fix: the strategy count was hard-coded as 5 and would silently go
        # stale whenever MutationStrategy gains or loses a member.
        return len(self.enabled_strategies) in (0, len(MutationStrategy))

    def is_enabled(self, strategy: MutationStrategy):
        """ Returns whether the given strategy is enabled in this configuration"""
        return strategy in self.enabled_strategies
38 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/mutator.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.mutator.source_file import SourceFile
2 | from pathlib import Path
3 |
4 |
class Mutator:
    """ Mutator

    A mutator implements the logic to generate mutants
    """

    def mutate(self, source_file: SourceFile, project_directory: Path):
        """Produce mutations for `source_file`; subclasses must override this."""
        raise NotImplementedError
12 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/solidity/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/eth_vertigo/mutator/solidity/__init__.py
--------------------------------------------------------------------------------
/eth_vertigo/mutator/solidity/solidity_file.py:
--------------------------------------------------------------------------------
1 | from jsonpath_rw import parse
2 | from subprocess import Popen, PIPE
3 | import re
4 | from json import loads, JSONDecodeError
5 | from pathlib import Path
6 | from eth_vertigo.mutator.source_file import SourceFile
7 |
8 |
def _get_ast(file: Path):
    """Compile `file` with solc and return its (legacy) json AST.

    :param file: path of the solidity source file
    :return: the AST dict for `file`, or None when solc emitted none for it
    :raises ValueError: when `file` does not exist
    :raises Exception: when solc produced no output
    """
    # Assert precondition
    if not file.exists():
        raise ValueError("File does not exist")

    # Execute solc
    # Fix: the binary was hard-coded to a developer's home directory
    # ("/home/walker/installers/solc"); resolve it from PATH instead.
    command = ["solc", "--ast-json"]
    command += [str(file)]
    proc = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    output, err = proc.communicate()

    # Fix: `output` is bytes, so the previous `output == ""` comparison could
    # never be True and empty output went undetected.
    if not output:
        raise Exception("Error while retrieving the ast: {}".format(err))

    # Find the different ast's: solc prints a "======= <file> =======" header
    # before each file's json blob
    found_asts = {}
    current_contract = None
    current_json_lines = []

    for line in output.decode('utf-8').split("\n"):
        if re.match("======= .* =======", line):
            if current_json_lines and current_contract:
                json_string = "\n".join(current_json_lines)
                try:
                    found_asts[current_contract] = loads(json_string)
                except JSONDecodeError:
                    pass

            current_contract = line.split(" ")[1]
            current_json_lines = []
        else:
            current_json_lines.append(line)

    # Fix: flush the final section; previously the last AST in the output
    # (typically the only one) was discarded because it is only followed by
    # end-of-output, not by another header.
    if current_json_lines and current_contract:
        try:
            found_asts[current_contract] = loads("\n".join(current_json_lines))
        except JSONDecodeError:
            pass

    if str(file) in found_asts.keys():
        return found_asts[str(file)]
    return None
45 |
46 |
47 | def _get_src(src_str: str):
48 | return [int(e) for e in src_str.split(":")]
49 |
50 |
51 | def _get_binaryop_info(node: dict):
52 | """
53 | Gets info on the binary operation from an ast node
54 | This ast node must be referencing an binary operation
55 |
56 | :param node: ast node to look for
57 | :return: the operator, src for the operator
58 | """
59 | if node["name"] != "BinaryOperation":
60 | raise ValueError("Passed node is not a binary operation")
61 |
62 | c_src = _get_src(node["src"])
63 |
64 | original_operator = node["attributes"]["operator"]
65 | op0_src = _get_src(node["children"][0]["src"])
66 | op1_src = _get_src(node["children"][1]["src"])
67 |
68 | if not (c_src[2] == op0_src[2] == op1_src[2]):
69 | raise ValueError("src fields are inconsistent")
70 |
71 | start = op0_src[0] + op0_src[1]
72 | length = op1_src[0] - start
73 | op_src = (start, length, c_src[2])
74 |
75 | return original_operator, op_src
76 |
77 |
78 | def _get_op_info(node: dict):
79 | c_src = _get_src(node["src"])
80 |
81 | original_operator = node["attributes"]["operator"]
82 | op0_src = _get_src(node["children"][0]["src"])
83 | op1_src = _get_src(node["children"][1]["src"])
84 |
85 | if not (c_src[2] == op0_src[2] == op1_src[2]):
86 | raise ValueError("src fields are inconsistent")
87 |
88 | start = op0_src[0] + op0_src[1]
89 | length = op1_src[0] - start
90 | op_src = (start, length, c_src[2])
91 |
92 | return original_operator, op_src
93 |
94 |
class SolidityFile(SourceFile):
    """SourceFile implementation backed by solc's legacy ("name"/"children") AST."""

    def __init__(self, file: Path):
        super().__init__(file)
        self.ast = _get_ast(file)

    def get_binary_op_locations(self):
        """Yield (operator, src) for every binary operation in the AST."""
        path_expr = parse('*..name.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["name"] != "BinaryOperation":
                continue
            yield _get_binaryop_info(match.value)

    def get_if_statement_binary_ops(self):
        """Yield (operator, src) for the condition of every if statement."""
        path_expr = parse('*..name.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["name"] != "IfStatement":
                continue
            # In the legacy AST the condition is the first child of the if node
            condition = match.value["children"][0]
            yield _get_binaryop_info(condition)

    def get_assignments(self):
        """Yield (operator, src) for every assignment in the AST."""
        path_expr = parse('*..name.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["name"] != "Assignment":
                continue
            yield _get_op_info(match.value)

    def get_void_calls(self):
        """Yield (None, src) for calls to functions that return nothing."""
        path_expr = parse('*..name.`parent`')
        for match in path_expr.find(self.ast):
            if match.value["name"] != "FunctionCall":
                continue
            # Bug fix: the key was misspelled as "childen", raising KeyError on
            # every FunctionCall node.
            function_identifier = match.value["children"][0]
            function_typedef = function_identifier["attributes"]["type"]
            if "returns" in function_typedef:
                continue
            if "function" not in function_typedef:
                continue
            # Robustness: identifiers without a "value" attribute cannot be `require`
            if "require" in function_identifier["attributes"].get("value", ""):
                continue
            yield (None, _get_src(match.value["src"]))

    def get_modifier_invocations(self):
        """Modifier discovery is not implemented for the legacy AST traversal."""
        return []
139 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/solidity/solidity_mutator.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.mutator.solidity.solidity_file import SolidityFile
2 | from eth_vertigo.mutator.source_mutator import SourceMutator
3 | from eth_vertigo.core.mutation import Mutation
4 | from pathlib import Path
5 |
# Operator-replacement tables used by the Solidity mutator below.  Replacement
# strings deliberately carry surrounding spaces so the spliced-in operator
# stays token-separated from its neighbours in the mutated source.

# Boundary-style replacements for comparison operators.
# NOTE(review): the "==" -> " != " entry negates rather than shifts a boundary,
# and ">=" has no entry — confirm both are intentional.
conditional_boundary_mutator = {
    "==": " != ",
    "<": " <= ",
    "<=": " < ",
    ">": " >= "
}

# Negation of each comparison operator.
conditional_negation_mutator = {
    "==": " != ",
    "!=": " == ",
    "<=": " > ",
    ">=": " < ",
    "<": " >= ",
    ">": " <= "
}

# Swap compound increment/decrement assignment operators.
increments_mutator = {
    "+=": " -= ",
    "-=": " += "
}

# Arithmetic and bitwise operator replacements.
# NOTE(review): both "|" and "^" map to " & ", so those two mutations are
# indistinguishable in output — confirm "^" was not meant to map to "|".
math_mutants = {
    "+": " - ",
    "-": " + ",
    "*": " / ",
    "/": " * ",
    "&": " | ",
    "|": " & ",
    "^": " & ",
    "~": "",
    "<<": " >> ",
    ">>": " << "
}

# Turn "x -= y" into "x =- y" (and "+=" into "=+"): classic typo-style mutants.
mirror_mutants = {
    "-=": "=-",
    "+=": "=+"
}
44 |
45 |
def _mutate_binary_op(
    mutate_dict: dict, source_file: SolidityFile, project_directory: Path
):
    """Yield a Mutation for each binary operator that has an entry in *mutate_dict*.

    Operators without a replacement entry are skipped.
    """
    for original_operator, src in list(source_file.get_binary_op_locations()):
        replacement = mutate_dict.get(original_operator)
        if replacement is None:
            continue
        yield Mutation(src, source_file, replacement, project_directory)
57 |
58 |
def _mutate_assignment(
    mutate_dict: dict, source_file: SolidityFile, project_directory: Path
):
    """Yield a Mutation for each assignment operator that has an entry in *mutate_dict*."""
    for original_operator, src in source_file.get_assignments():
        replacement = mutate_dict.get(original_operator)
        if replacement is None:
            continue
        yield Mutation(src, source_file, replacement, project_directory)
70 |
71 |
class SolidityMutator(SourceMutator):
    """Produces mutations for Solidity sources using the operator tables above.

    NOTE(review): mutate_mirror is defined here but is not part of
    SourceMutator.mutate's strategy list — confirm whether that is deliberate.
    """

    @staticmethod
    def mutate_boundary_conditionals(source_file: SolidityFile, project_directory: Path):
        # Replace comparison operators using the boundary table.
        return _mutate_binary_op(conditional_boundary_mutator, source_file, project_directory)

    @staticmethod
    def mutate_negated_conditionals(source_file: SolidityFile, project_directory: Path):
        # Replace each comparison operator with its negation.
        return _mutate_binary_op(conditional_negation_mutator, source_file, project_directory)

    @staticmethod
    def mutate_math_ops(source_file: SolidityFile, project_directory: Path):
        # Swap arithmetic / bitwise operators for their counterparts.
        return _mutate_binary_op(math_mutants, source_file, project_directory)

    @staticmethod
    def mutate_increments(source_file: SolidityFile, project_directory: Path):
        # Swap "+=" and "-=" compound assignments.
        return _mutate_assignment(increments_mutator, source_file, project_directory)

    @staticmethod
    def mutate_mirror(source_file: SolidityFile, project_directory: Path):
        # Turn "x -= y" into "x =- y" (and "+=" into "=+").
        return _mutate_assignment(mirror_mutants, source_file, project_directory)

    @staticmethod
    def mutate_voids(source_file: SolidityFile, project_directory: Path):
        # Delete entire calls to functions that return nothing.
        void_calls = list(source_file.get_void_calls())
        for _, src in void_calls:
            # Widen the removed span by one character — presumably to take a
            # trailing character (e.g. ';') with the call; TODO confirm.  Also
            # assumes src is a mutable sequence (list), not a tuple — verify
            # against _get_src.
            src[1] += 1
            yield Mutation(src, source_file, "", project_directory)

    @staticmethod
    def mutate_modifier(source_file: SolidityFile, project_directory: Path):
        # SolidityFile.get_modifier_invocations currently returns [], so this
        # yields nothing; kept to satisfy the SourceMutator interface.
        modifier_invocations = list(source_file.get_modifier_invocations())
        for _, src in modifier_invocations:
            yield Mutation(src, source_file, "", project_directory)
105 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/source_file.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import Tuple
3 |
4 |
class SourceFile:
    """Represents a single contract file on disk.

    Stores the file it represents and declares the queries that Mutator
    implementations use to locate interesting source positions.  Concrete
    subclasses implement the queries; this base raises NotImplementedError.
    """

    def __init__(self, file: Path):
        # Path of the contract file this object wraps.
        self.file = file

    def get_binary_op_locations(self) -> Tuple[str, Tuple[int, int, int]]:
        """Locate every binary operation in the source file.

        :returns tuple with (original_operation, src)
        """
        raise NotImplementedError

    def get_if_statement_binary_ops(self) -> Tuple[str, Tuple[int, int, int]]:
        """Locate the binary operations used as if-statement conditions.

        :returns tuple with (original_operation, src)
        """
        raise NotImplementedError

    def get_assignments(self) -> Tuple[str, Tuple[int, int, int]]:
        """Locate every assignment in the source file.

        :returns tuple with (original_operation, src)
        """
        raise NotImplementedError

    def get_void_calls(self) -> Tuple[str, Tuple[int, int, int]]:
        """Locate every call to a void function in the source file.

        :returns tuple with (original_operation, src)
        """
        raise NotImplementedError
41 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/source_mutator.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.mutator.source_file import SourceFile
2 | from pathlib import Path
3 | from eth_vertigo.mutator.mutator import Mutator
4 |
5 |
class SourceMutator(Mutator):
    """Template mutator that fans out over a fixed list of mutation strategies.

    Subclasses override the static strategy hooks below.
    NOTE(review): mirror mutations are not part of the strategy list here,
    even though a subclass may implement them — confirm this is deliberate.
    """

    def mutate(self, source_file: SourceFile, project_directory: Path):
        """Collect the mutations produced by every strategy for *source_file*.

        Returns an empty list when the file does not exist on disk.
        """
        if not source_file.file.exists():
            return []

        strategies = (
            self.mutate_boundary_conditionals,
            self.mutate_negated_conditionals,
            self.mutate_math_ops,
            self.mutate_increments,
            self.mutate_voids,
            self.mutate_modifier,
        )

        result = []
        for strategy in strategies:
            result.extend(strategy(source_file, project_directory))
        return result

    @staticmethod
    def mutate_boundary_conditionals(source_file: SourceFile, project_directory: Path):
        raise NotImplementedError

    @staticmethod
    def mutate_negated_conditionals(source_file: SourceFile, project_directory: Path):
        raise NotImplementedError

    @staticmethod
    def mutate_math_ops(source_file: SourceFile, project_directory: Path):
        raise NotImplementedError

    @staticmethod
    def mutate_increments(source_file: SourceFile, project_directory: Path):
        raise NotImplementedError

    @staticmethod
    def mutate_voids(source_file: SourceFile, project_directory: Path):
        raise NotImplementedError

    @staticmethod
    def mutate_modifier(source_file: SourceFile, project_directory: Path):
        raise NotImplementedError
45 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/universal_mutator/__init__.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import List
3 |
4 | from eth_vertigo.mutator.universal_mutator.rule import Rule
5 | from eth_vertigo.mutator.universal_mutator.rule_loader import RuleLoader
6 | from eth_vertigo.mutator.mutator import Mutator
7 | from eth_vertigo.mutator.source_file import SourceFile
8 | from eth_vertigo.core.mutation import Mutation
9 |
10 |
class UniversalMutator(Mutator):
    """Mutator based on Universal Mutator

    Props to the design of universal mutator style mutation rules and evaluation of the approach go to Alex Groce et al.
    Repository:
        https://github.com/agroce/universalmutator
    Paper:
        An Extensible, Regular-Expression-Based Tool for Multi-Language Mutant Generation - Alex Groce et al.
    """

    def __init__(self):
        # Maps rule-file name -> list of Rule objects loaded from that file.
        self.rule_sets = {}

    def load_rule(self, rule_file: Path):
        """Load rule from rule files"""
        self.rule_sets[str(rule_file.name)] = list(RuleLoader().load_from_file(rule_file))

    def mutate(self, source_file: SourceFile, project_directory: Path) -> List[Mutation]:
        """Generate mutants

        Applies every loaded universal-mutator rule to *source_file* and
        collects the resulting mutants into a single list.
        """
        return [
            mutant
            for rules in self.rule_sets.values()
            for rule in rules
            for mutant in rule.generate_mutants(source_file, project_directory)
        ]
40 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/universal_mutator/rule.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from typing import Generator
3 | from eth_vertigo.core import Mutation
4 | from eth_vertigo.mutator.source_file import SourceFile
5 | from re import finditer
6 |
7 |
class Rule:
    """ Rule

    This class represents a single universal mutator style mutation rule.

    Additionally the class provides logic to apply the rule to smart contracts.
    """

    def __init__(self, match, replace: str):
        """ Instantiate a new Rule

        :param match: The regex expression that specifies which parts of a program to replace
        :param replace: The string with which to replace the matches
        """
        self.match = match
        self.replace = replace

    def generate_mutants(self, source: SourceFile, project_directory: Path) -> Generator[Mutation, None, None]:
        """Yield a Mutation for every occurrence of the rule's pattern in *source*."""
        # Sentinel replacement: the rule marks matched code as off-limits.
        if self.replace in ("DO_NOT_MUTATE", ):
            return

        file = source.file
        file_content = file.read_text(encoding="utf-8")

        for occurrence in finditer(self.match, file_content):
            start = occurrence.start()
            # BUG FIX: size was previously end() - 1 - start, which left the
            # last matched character in place — and for a single-character
            # match replaced nothing, turning replacement into insertion.
            # re match spans are half-open, so the matched length is end - start.
            size = occurrence.end() - start
            yield Mutation((start, size, 0), source, self.replace, project_directory)
37 |
--------------------------------------------------------------------------------
/eth_vertigo/mutator/universal_mutator/rule_loader.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.mutator.universal_mutator.rule import Rule
2 | from pathlib import Path
3 | from typing import Generator
4 | from re import compile, error
5 | from loguru import logger
6 |
7 |
class RuleLoader:
    """ RuleLoader

    This class implements the logic to load universal mutator style mutation rules
    """

    def load_from_file(self, file: Path) -> Generator[Rule, None, None]:
        """Load mutation rules from file"""
        file_contents = file.read_text(encoding='utf-8')
        return self.load_from_txt(file_contents)

    def load_from_txt(self, rule_txt: str) -> Generator[Rule, None, None]:
        """Load mutation rules from text

        Lines without the "==>" separator must be comments ("#") or blank;
        anything else is reported as a parse error and skipped.
        """
        for line in rule_txt.split("\n"):
            # Deal with non mutation rule lines
            if "==>" not in line:
                if line.startswith("#"):
                    continue
                if line == "":
                    continue
                logger.warning(f"Error while parsing Universal Mutator rules:\n{line}")
                continue

            split_line = line.split("==>")
            # BUG FIX: this previously tested len(line) < 2 — the character
            # count of the raw line — instead of the number of parts.
            if len(split_line) < 2:
                logger.warning(f"Error while parsing Universal Mutator rules:\n{line}")
                continue

            match = split_line[0]
            replace = split_line[1]

            # compile regex
            try:
                match = compile(match)
            except error:
                logger.warning(f"Error while parsing Universal Mutator rules.\nInvalid regular expression:\n{line}")
                # BUG FIX: previously a Rule with the *uncompiled* pattern was
                # still yielded, deferring the failure to mutation time.
                continue

            # drop return char if present (defensive: split("\n") already
            # removes newlines, so this rarely fires)
            if replace and replace[-1] == '\n':
                replace = replace[:-1]
            # BUG FIX: str.strip returns a new string; the result was discarded.
            # NOTE(review): the match side is intentionally *not* stripped —
            # confirm whether leading/trailing spaces should be part of the
            # pattern.
            replace = replace.strip(' ')
            yield Rule(match, replace)
50 |
--------------------------------------------------------------------------------
/eth_vertigo/test_runner/__init__.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.test_runner.test_result import TestResult
2 | from eth_vertigo.test_runner.runner import Runner
3 |
--------------------------------------------------------------------------------
/eth_vertigo/test_runner/exceptions.py:
--------------------------------------------------------------------------------
class TestRunException(Exception):
    """Generic failure raised by test-runner components."""
3 |
4 |
class TimedOut(Exception):
    """Raised when a test run exceeds its allotted time."""
7 |
8 |
class DidNotCompile(Exception):
    """Raised when the (mutated) project fails to compile."""
11 |
class EquivalentMutant(Exception):
    """Raised to signal a mutant judged equivalent to the original program."""
14 |
--------------------------------------------------------------------------------
/eth_vertigo/test_runner/file_editor.py:
--------------------------------------------------------------------------------
1 | from typing import Tuple
2 |
3 |
class FileEditor:
    @staticmethod
    def edit(target_file_name: str, edit_location: Tuple[int, int, int], replacement: str) -> None:
        """
        Rewrites the edit location in the target file with the replacement
        :param target_file_name: Name of the file to apply the rewrite to
        :param edit_location: Location to rewrite as (offset, length, file index)
        :param replacement: Replacement for location
        :raises ValueError: if the offset is negative
        """
        # BUG FIX: offset 0 (the very start of the file) is a valid edit
        # location; only genuinely negative offsets are "negative space".
        if edit_location[0] < 0:
            raise ValueError("Cannot edit in the negative space")

        with open(target_file_name, 'r') as file:
            content = file.read()

        offset, length = edit_location[0], edit_location[1]
        with open(target_file_name, 'w') as file:
            # Splice the replacement over content[offset:offset + length]
            file.write(content[:offset] + replacement + content[offset + length:])
24 |
--------------------------------------------------------------------------------
/eth_vertigo/test_runner/runner.py:
--------------------------------------------------------------------------------
class Runner:
    """Abstract interface for project test runners.

    Concrete runners discover and execute a project's test suite; every
    entry point here raises NotImplementedError.
    """

    @property
    def tests(self) -> list:
        """List the available tests."""
        raise NotImplementedError

    def run_tests(self, coverage: bool):
        """Execute every available test."""
        raise NotImplementedError

    def run_test(self, name: str, coverage: bool):
        """Execute the single test identified by *name*."""
        raise NotImplementedError
15 |
--------------------------------------------------------------------------------
/eth_vertigo/test_runner/test_result.py:
--------------------------------------------------------------------------------
class TestResult:
    """Value object describing the outcome of a single executed test."""

    def __init__(self, title: str, full_title: str, duration: int, success: bool):
        """Create a test result.

        :param title: Short title of the test
        :param full_title: Fully qualified title of the test
        :param duration: How long the test took to run
        :param success: True when the test passed
        """
        self.title = title
        self.full_title = full_title
        self.duration = duration
        self.success = success
14 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | jsonpath-rw
2 | pytest
3 | click
4 | tqdm
5 | jinja2
6 | loguru
7 | pyyaml
8 |
9 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from setuptools import setup, find_packages
3 |
# The directory containing this file
HERE = pathlib.Path(__file__).parent

# The text of the README file, used as the long PyPI description
README = (HERE / "README.md").read_text()

REQUIREMENTS = (HERE / "requirements.txt")

# One requirement per non-empty line.  Robustness: also ignore "#" comment
# lines so requirements.txt can be annotated without breaking the build.
requirements = [
    x
    for x in map(str.strip, REQUIREMENTS.read_text().split("\n"))
    if x != "" and not x.startswith("#")
]

setup(
    name="eth-vertigo",
    version="1.3.0",
    description="Mutation Testing for Ethereum Smart Contracts",
    long_description=README,
    long_description_content_type="text/markdown",
    url="https://github.com/joranhonig/vertigo",
    author="Joran Honig",
    author_email="joran.honig@gmail.com",
    license="GPLv3",
    classifiers=[
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
    ],
    packages=find_packages(),
    include_package_data=True,
    install_requires=requirements,
    entry_points={
        "console_scripts": [
            "vertigo=eth_vertigo.cli.main:cli",
        ]
    },
)
37 |
--------------------------------------------------------------------------------
/test/mutation/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/test/mutation/__init__.py
--------------------------------------------------------------------------------
/test/mutation/test_mutation.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pathlib import Path
3 | from eth_vertigo.mutator.source_file import SourceFile
4 |
5 | from eth_vertigo.core.mutation import Mutation, MutationResult
6 |
7 |
def test_create_mutation(tmp_path: Path):
    """The constructor stores its arguments and initialises result bookkeeping."""
    # Arrange
    location = (1, 1, 0)
    source = None
    value = "replacement"

    # Act
    mutation = Mutation(location, source, value, tmp_path)

    # Assert
    assert mutation.location == location
    assert mutation.source == source
    assert mutation.value == value
    assert mutation.project_directory == tmp_path
    assert mutation.result is None
    assert mutation.crime_scenes == []
26 |
27 |
def test_relative_path(tmp_path):
    """relative_path is the source file's path relative to the project directory."""
    # Arrange
    nested = tmp_path / "testdir"
    nested.mkdir()
    sf_file = nested / "sf.txt"
    sf_file.write_text("some text", encoding="utf-8")

    mutation = Mutation(None, SourceFile(sf_file), None, tmp_path)

    # Act
    relative_path = mutation.relative_path

    # Assert
    assert Path(relative_path) == Path("testdir/sf.txt")
43 |
44 |
def test_get_mutated_line(tmp_path):
    """_get_mutated_line maps a character offset to (line_number, line_text)."""
    # Arrange
    target = tmp_path / "sf.txt"
    target.write_text("yeah\ngood", encoding="utf-8")

    sf = SourceFile(target)
    mutation = Mutation(None, None, None, None)

    # Act: offset 6 falls on the second line ("good")
    line_info = mutation._get_mutated_line(6, sf.file.read_text(encoding="utf-8"))

    # Assert
    assert line_info == (1, "good")
58 |
59 |
def test_repr(tmp_path):
    """__repr__ reports file, line number, result and original/mutated lines."""
    # Arrange
    file = tmp_path / "sf.txt"
    file.write_text("yeah\ngood", encoding="utf-8")

    sf = SourceFile(file)
    mutation = Mutation((6, 1, 0), sf, "b", tmp_path)
    mutation.result = MutationResult.LIVED

    # Act
    repr_ = str(mutation)

    # Assert (removed a leftover debug print of repr_)
    assert repr_ == \
        "Mutation:\n"\
        " File: " + str(file) + "\n" \
        " Line nr: 1\n" \
        " Result: Lived\n" \
        " Original line:\n" \
        " good\n" \
        " Mutated line:\n" \
        " gbod"
81 |
--------------------------------------------------------------------------------
/test/mutation/test_mutation_filter.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.core.filter import MutationFilter
2 | import pytest
3 |
4 |
def test_mutation_filter_interface():
    """The abstract filter rejects direct use of apply()."""
    # Arrange (local renamed so it no longer shadows the builtin `filter`)
    mutation_filter = MutationFilter()

    # Act and Assert
    with pytest.raises(NotImplementedError):
        mutation_filter.apply(None)
12 |
--------------------------------------------------------------------------------
/test/test_runner/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/test/test_runner/__init__.py
--------------------------------------------------------------------------------
/test/test_runner/test_exceptions.py:
--------------------------------------------------------------------------------
import pytest

from eth_vertigo.test_runner.exceptions import (
    DidNotCompile,
    EquivalentMutant,
    TestRunException,
    TimedOut,
)
4 |
5 |
def test_exception():
    """Smoke-test that every custom exception type can be raised and caught."""
    # Yet another coverage unit test
    with pytest.raises(TestRunException):
        raise TestRunException

    with pytest.raises(TimedOut):
        raise TimedOut

    with pytest.raises(DidNotCompile):
        raise DidNotCompile

    # Previously untested: EquivalentMutant lives in the same module.
    with pytest.raises(EquivalentMutant):
        raise EquivalentMutant
16 |
--------------------------------------------------------------------------------
/test/test_runner/test_file_editor.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | from eth_vertigo.test_runner.file_editor import FileEditor
3 | import pytest
4 |
# Fixture body: five identical characters make edit results easy to eyeball.
CONTENT = "aaaaa"
6 |
def test_edit_in_range(tmp_path):
    """Editing inside the file replaces exactly the requested span."""
    # Arrange
    target: Path = tmp_path / "target.txt"
    target.write_text(CONTENT)

    # Act
    FileEditor().edit(str(target.absolute()), (1, 2, 0), 'bb')

    # Assert
    assert target.read_text() == "abbaa"
19 |
20 |
def test_edit_outside_range(tmp_path):
    """An offset past the end of the file appends the replacement."""
    # Arrange
    target: Path = tmp_path / "target.txt"
    target.write_text(CONTENT)

    # Act
    FileEditor().edit(str(target.absolute()), (10, 2, 0), 'bb')

    # Assert
    assert target.read_text() == "aaaaabb"
33 |
def test_edit_negative(tmp_path):
    """Negative offsets are rejected with a ValueError."""
    # Arrange
    target: Path = tmp_path / "target.txt"
    target.write_text(CONTENT)

    # Act and assert
    with pytest.raises(ValueError):
        FileEditor().edit(str(target.absolute()), (-10, 2, 0), 'b')
44 |
45 |
--------------------------------------------------------------------------------
/test/test_runner/test_result_test.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.test_runner.test_result import TestResult
2 |
3 |
def test_create():
    """Every constructor argument ends up on the matching attribute."""
    # Act
    test_result = TestResult(
        title="title",
        full_title="full title",
        duration=10,
        success=True
    )

    # Assert
    assert test_result.title == "title"
    assert test_result.full_title == "full title"
    assert test_result.duration == 10
    assert test_result.success is True
18 |
--------------------------------------------------------------------------------
/test/test_runner/test_runner.py:
--------------------------------------------------------------------------------
1 | from eth_vertigo.test_runner.runner import Runner
2 | import pytest
3 |
4 | # This test is only here for coverage
5 |
6 |
def test_abstract_methods():
    """Every Runner entry point is abstract. (Coverage-only test.)"""
    # Arrange
    runner = Runner()

    # Act and Assert
    with pytest.raises(NotImplementedError):
        runner.run_test(None, None)

    with pytest.raises(NotImplementedError):
        runner.run_tests(None)

    with pytest.raises(NotImplementedError):
        _ = runner.tests
20 |
--------------------------------------------------------------------------------
/test/test_runner/truffle/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JoranHonig/vertigo/659b71cf763106c0d0fc8c1ce130658cece997c7/test/test_runner/truffle/__init__.py
--------------------------------------------------------------------------------
/test/test_runner/truffle/test_truffle_runner.py:
--------------------------------------------------------------------------------
1 | import shutil
2 | from pathlib import Path
3 | import pytest
4 |
5 | from unittest.mock import MagicMock
6 |
7 | from eth_vertigo.core import Mutation
8 | from eth_vertigo.mutator.source_file import SourceFile
9 | from eth_vertigo.interfaces.truffle.compiler import TruffleCompiler
10 | from eth_vertigo.interfaces.truffle.tester import TruffleTester, _set_reporter, _set_include_tests
11 | from eth_vertigo.interfaces.common.tester import make_temp_directory, rm_temp_directory, apply_mutation
12 |
13 |
def test_mk_tmp_truffle_directory(tmp_path: Path):
    """make_temp_directory copies the project's files into a fresh directory."""
    # Arrange
    original = tmp_path / "file.txt"
    original.write_text("example text")

    # Act
    created_dir = make_temp_directory(str(tmp_path))
    copied_file = Path(created_dir) / "file.txt"

    # Assert (always clean up the temp copy)
    try:
        assert copied_file.read_text('utf-8') == original.read_text("utf-8")
    finally:
        shutil.rmtree(created_dir)
30 |
31 |
def test_rm_truffle_directory(tmp_path):
    """rm_temp_directory removes a directory created by make_temp_directory."""
    # Arrange
    created = make_temp_directory(str(tmp_path))

    # Act
    rm_temp_directory(created)

    # Assert
    assert not Path(created).exists()
42 |
43 |
def test_set_reporter(tmp_path: Path):
    """_set_reporter appends the mocha/solc configuration to truffle.js."""
    # Arrange
    pre_text = "text_that was here before;"
    config = tmp_path / "truffle.js"
    config.write_text(pre_text)

    # Act
    _set_reporter(str(tmp_path))

    # Assert
    expected = (
        pre_text
        + "\nmodule.exports.mocha = {reporter: \"json\"};\n"
        + "\nmodule.exports.solc = {optimizer: { enabled: true, runs: 200}};\n"
    )
    assert config.read_text("utf-8") == expected
59 |
60 |
def test_apply_mutation(tmp_path):
    """apply_mutation writes the mutated span into the working directory."""
    # Arrange
    target = tmp_path / "mutator.sol"
    target.touch()
    mutation = Mutation(
        location=(1, 1, 0),
        source=SourceFile(target),
        value="value",
        project_directory=tmp_path,
    )

    # Act
    apply_mutation(mutation, str(tmp_path))

    # Assert
    assert target.read_text("utf-8") == "value"
75 |
76 |
77 |
def test_truffle_runner_run_tests(tmp_path):
    """run_tests forwards to run_test_command and returns its result."""
    # Arrange
    config = tmp_path / "truffle.js"
    config.touch()

    expected = {"test_result": True}
    tester = TruffleTester(str(config), str(tmp_path), TruffleCompiler(str(config)))
    tester.run_test_command = MagicMock(return_value=expected)

    # Act / Assert
    assert tester.run_tests() == expected
92 |
93 |
def test_truffle_runner_run_coverage(tmp_path):
    """Coverage mode is not implemented for the truffle tester."""
    # Arrange
    config = tmp_path / "truffle.js"
    config.touch()

    tester = TruffleTester(str(config), str(tmp_path), TruffleCompiler(str(config)))
    tester.run_test_command = MagicMock(return_value={"test_result": True})

    # Act and Assert
    with pytest.raises(NotImplementedError):
        tester.run_tests(coverage=True)
106 |
107 |
def test_truffle_runner_run_test_with_mutation(tmp_path):
    """run_tests accepts a mutation and still returns the command's result."""
    # Arrange: a mutation over a fresh contract file
    target = tmp_path / "mutator.sol"
    target.touch()
    mutation = Mutation(
        location=(1, 1, 0),
        source=SourceFile(target),
        value="value",
        project_directory=tmp_path,
    )

    # ... and a tester whose command layer is mocked out
    config = tmp_path / "truffle.js"
    config.touch()
    expected = {"test_result": True}
    tester = TruffleTester(str(config), str(tmp_path), TruffleCompiler(str(config)))
    tester.run_test_command = MagicMock(return_value=expected)

    # Act / Assert
    assert tester.run_tests(mutation=mutation) == expected
129 |
--------------------------------------------------------------------------------
/vertigo.py:
--------------------------------------------------------------------------------
from eth_vertigo.cli.main import cli

# Entry-point guard: run the CLI only when executed as a script, so importing
# this module (e.g. from tests or tooling) has no side effects.
if __name__ == "__main__":
    cli()
--------------------------------------------------------------------------------