├── .eslintrc.json ├── .github └── workflows │ └── node.js.yml ├── .gitignore ├── .mocharc.js ├── .prettierrc ├── LICENSE ├── README.md ├── docs ├── templates.md └── test-runner.md ├── images ├── chart-downloads-slide.png ├── chart-popularity-slide.png ├── comparing-javascript-test-runners.png ├── jest-cli1.png ├── jest-cli2.png ├── jest-cli3.png ├── logo-ava.png ├── logo-jest.png ├── logo-mocha.png ├── output-ava.png ├── output-jest.png ├── output-mocha.png ├── output-mpt.png └── speed-test-results.png ├── jest.config.js ├── package-lock.json ├── package.json ├── src ├── fullCircle.js ├── generateTests │ ├── config.js │ ├── generateTestFiles.js │ └── index.js ├── runAllTests.js └── utils │ ├── date.js │ ├── index.js │ ├── range.js │ └── shuffle.js ├── test ├── ava │ └── .gitkeep ├── jest │ └── .gitkeep ├── mocha │ └── .gitkeep ├── src │ ├── .eslintrc.json │ ├── date.spec.js │ ├── range.spec.js │ └── shuffle.spec.js └── templates │ ├── ava.js │ ├── jest.js │ └── mocha.js └── testSetup.js /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["airbnb-base", "plugin:prettier/recommended"], 3 | "plugins": ["prettier"], 4 | "root": true, 5 | "rules": { 6 | "import/prefer-default-export": 0, 7 | "prettier/prettier": "error" 8 | }, 9 | "env": { 10 | "mocha": true, 11 | "node": true 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /.github/workflows/node.js.yml: -------------------------------------------------------------------------------- 1 | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions 3 | 4 | name: Node.js CI 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | 15 | 
runs-on: ubuntu-latest 16 | 17 | strategy: 18 | matrix: 19 | node-version: [10.x, 12.x, 14.x] 20 | 21 | steps: 22 | - uses: actions/checkout@v2 23 | - name: Use Node.js ${{ matrix.node-version }} 24 | uses: actions/setup-node@v1 25 | with: 26 | node-version: ${{ matrix.node-version }} 27 | - run: npm ci 28 | - run: npm test 29 | - run: npm run make-tests 30 | - run: npm run test-all 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .nyc_output 3 | coverage 4 | .DS_Store 5 | .vscode/ 6 | 7 | # Tests that can be rebuilt 8 | test/ava/*.spec.js 9 | test/jest/*.spec.js 10 | test/mocha/*.spec.js 11 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | recursive: true, 3 | timeout: 10000, 4 | }; 5 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "es5" 4 | } 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Dave Cohen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice 
shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Comparing JavaScript Test Runners 2 | 3 | ![comparing-javascript-test-runners.png](images/comparing-javascript-test-runners.png) 4 | 5 | 6 | 7 | This article is a comparison of the AVA, Jest, and Mocha JavaScript test runners. [Permalink](https://github.com/scraggo/comparing-javascript-test-runners/blob/master/README.md) 8 | 9 | To generate the speed metrics in the article, I created a node application (part of this repo) that runs tests in all the frameworks listed above. [See the documentation here](https://github.com/scraggo/comparing-javascript-test-runners/blob/master/docs/test-runner.md). 10 | 11 | - [Comparing JavaScript Test Runners](#comparing-javascript-test-runners) 12 | - [Overview](#overview) 13 | - [The Problem](#the-problem) 14 | - [Goals](#goals) 15 | - [Testing in general](#testing-in-general) 16 | - [Documentation: writing tests that outline the functionality of the application](#documentation-writing-tests-that-outline-the-functionality-of-the-application) 17 | - [Philosophy: "What" should we test? 
What level of "granularity" are we aiming for?](#philosophy-what-should-we-test-what-level-of-granularity-are-we-aiming-for) 18 | - [State: the pros and cons of sharing state between tests](#state-the-pros-and-cons-of-sharing-state-between-tests) 19 | - [Coverage: the extent to which one should measure test coverage](#coverage-the-extent-to-which-one-should-measure-test-coverage) 20 | - [Tips](#tips) 21 | - [Requirements](#requirements) 22 | - [Popularity and Community](#popularity-and-community) 23 | - [Speed](#speed) 24 | - [Ease of Use](#ease-of-use) 25 | - [Failure Reporting and Debugging](#failure-reporting-and-debugging) 26 | - [Works with your framework and environment of choice (React, Redux, Electron, etc)](#works-with-your-framework-and-environment-of-choice-react-redux-electron-etc) 27 | - [Nice to Have](#nice-to-have) 28 | - [Comparing the Test Runners](#comparing-the-test-runners) 29 | - [AVA](#ava) 30 | - [Jest](#jest) 31 | - [Mocha](#mocha) 32 | - [mocha-parallel-tests](#mocha-parallel-tests) 33 | - [Popularity and Community Comparison](#popularity-and-community-comparison) 34 | - [Speed Comparison](#speed-comparison) 35 | - [What do "serial" and "parallel" mean?](#what-do-serial-and-parallel-mean) 36 | - [Benchmarks](#benchmarks) 37 | - [Ease of Use Comparison](#ease-of-use-comparison) 38 | - [Amount of necessary configuration/dependencies](#amount-of-necessary-configurationdependencies) 39 | - [Writing the tests](#writing-the-tests) 40 | - [Running the tests](#running-the-tests) 41 | - [Failure Reporting and Debugging Comparison](#failure-reporting-and-debugging-comparison) 42 | - [Works with your framework and environment of choice (React, Redux, Electron, etc) Comparison](#works-with-your-framework-and-environment-of-choice-react-redux-electron-etc-comparison) 43 | - [Full Comparison (with "Nice to Haves")](#full-comparison-with-nice-to-haves) 44 | - [Recommendations](#recommendations) 45 | - [Conclusion](#conclusion) 46 | - [Want to 
contribute?](#want-to-contribute) 47 | - [External Resources](#external-resources) 48 | 49 | ## Overview 50 | 51 | ### The Problem 52 | 53 | Technology is always changing. As a result, the popular frameworks of today may become unpopular tomorrow. Despite this, we can assume that code should be tested and there are general principles we can adhere to. 54 | 55 | In recent years, JavaScript has become a more robust language thanks to the steady cadence of enhancements starting with ES6/2015. As a result, many front-end frameworks can accomplish the task of creating serious web applications. In general, these frameworks are much easier to test than frameworks of years past. Three test runners have risen far above the rest as the most popular choices: 56 | 57 | - AVA - 58 | - Jest - 59 | - Mocha - 60 | 61 | ### Goals 62 | 63 | The immediate practical goal of this article is to help you choose a JavaScript testing framework. (Note: I'll be using "test runner" and "testing framework" interchangeably.) My aim is to address the following questions: 64 | 65 | **How does one choose the right testing framework for their use case? What criteria should one base their decision on?** 66 | 67 | In order to do this, we'll explore some general principles regarding both testing frameworks and testing in general. Then, after outlining the criteria for evaluating the frameworks, we can explore them in detail. 68 | 69 | ## Testing in general 70 | 71 | There are differing opinions on what the "best practices" are for testing code. I have a pragmatic opinion - we should test what is necessary to gain confidence that our application is running correctly and we should do it in a way that leverages existing tools that have been refined and are trusted by the developer community. 72 | 73 | The general principles I think are important are: 74 | 75 | ### Documentation: writing tests that outline the functionality of the application 76 | 77 | - A test description should be simple to understand. 
Written in this way, the tests are another form of documentation for the source code. A product owner should be able to read through the tests and understand their relation to the application as a whole. 78 | - In the case of testing abstractions, we should take time to be even clearer about the utility of such a test. 79 | 80 | ### Philosophy: "What" should we test? What level of "granularity" are we aiming for? 81 | 82 | - A unit test has the smallest scope of the test types. What may be under test is a function or a class. 83 | - An integration test has a wider scope than a unit test. This sort of test combines individual units and tests them as a group with the aim of assuring that they run correctly when interacting with each other. (What we want to avoid is the scenario where units work perfectly in isolation, but not together.) 84 | - An acceptance test (a.k.a. end to end test) has the widest scope. Its goal is to test as an end user would use the application without directly calling source code. 85 | - When in doubt: 86 | - It should be clear where unit, integration, and acceptance tests are. 87 | - Think of the unit under test as a "black box" whenever possible. The goal should be to test the _behavior_ of the unit, not how it's implemented. 88 | - Don't test external library code. We should be using high-quality libraries that have their own test suites. 89 | 90 | ### State: the pros and cons of sharing state between tests 91 | 92 | - Ideally, one doesn't share state between tests, period. Test suites (a file that contains test blocks) should definitely aim to steer clear of shared state. Individual test blocks should have a clean version of the unit under test. 93 | - In a few cases, shared setup between test blocks may be necessary. An example of this is when we're making assertions during an end-to-end test that has many steps. 
94 | 95 | ### Coverage: the extent to which one should measure test coverage 96 | 97 | - Using test coverage tools gives us a metric for how many lines, statements, or blocks of your code are tested. Your team may decide that 100% test coverage is necessary. My feeling is that you should cover all _essential_ functions, the goal being that after changing code, your tests will fail if your changes affected anything that it could have affected. 98 | - Another "metric" that I like to use while reviewing code is analyzing the changes for the big picture: Do the tests cover what was in the "acceptance criteria" of the task? 99 | 100 | ### Tips 101 | 102 | - Learn more about testing by reading the test specs of a well-written library. 103 | - Is an external library under-documented? Read through the test-suites to get a quick view into how it works. (Reading the open and closed issues may be helpful too.) 104 | 105 | ## Requirements 106 | 107 | Now that we've outlined some testing concepts, we can dive into what we'll require from a test framework. 108 | 109 | ### Popularity and Community 110 | 111 | A testing framework should have community support. An unpopular framework may be out of date or may not have the kinks ironed out. It might have incomplete documentation (including stack overflow questions.) It might not have enough developers working on it to fix its issues. 112 | 113 | ### Speed 114 | 115 | A testing framework should not be slow. We can only define this _relatively_ - for one person's _slow_ may be another person's _acceptable_. We will be benchmarking speed for all the frameworks. 116 | 117 | ### Ease of Use 118 | 119 | A testing framework should be un-difficult to use. The setup, configuration, command-line options, and test writing itself should be _relatively_ straightforward. If it's too confusing to a developer to use, it will be less likely that tests will get written. 
120 | 121 | ### Failure Reporting and Debugging 122 | 123 | A testing framework should give ample information when a particular test fails. We should know exactly which test failed and the stack trace of the unit being called. We should also be able to put breakpoints in with our favorite debugging tools. 124 | 125 | ### Works with your framework and environment of choice (React, Redux, Electron, etc) 126 | 127 | A testing framework must be compatible with what you're trying to test. It should be flexible enough to be able to adapt to the changing needs of those frameworks. 128 | 129 | - Ability to run in multiple environments: `node`, in-browser, CI/CD 130 | - Versatility to run unit, integration, and end-to-end tests. 131 | 132 | ### Nice to Have 133 | 134 | Depending on what you need to test, a framework should support: 135 | 136 | - Organization strategies: `describe` blocks, `it` blocks 137 | - "watch" and "inspect" modes 138 | - A variety of assertion capabilities 139 | - Ability to add tools like coverage (`nyc`), snapshot testing, etc. 140 | - Mocking / Injecting modules (intercepting require statements) 141 | - Webpack compilation (injecting of webpack-defined global variables) 142 | - simulating a CICD build step 143 | - Babel transpilation of ESNext code 144 | - Use of alias module import statements (removing the need for `../../`) 145 | 146 | ## Comparing the Test Runners 147 | 148 | Now, onto comparing the test runners themselves. Here's an overview of each one: 149 | 150 | ### AVA 151 | 152 | ![Ava Logo](images/logo-ava.png) 153 | 154 | 155 | 156 | The magic of AVA is in its simplicity. It's minimal, fast, concurrent, and has a simple syntax that entirely removes the use of globals (like `describe`, `it`, etc.) It supports asynchronous behavior out of the box. AVA has a small team of three developers, one being open-source heavyweight `@sindresorhus`. 
Some other selling points directly from their readme: 157 | 158 | > AVA is a test runner for Node.js with a concise API, detailed error output, embrace of new language features and process isolation that let you write tests more effectively. 159 | > 160 | > AVA adds code excerpts and clean diffs for actual and expected values. If values in the assertion are objects or arrays, only a diff is displayed, to remove the noise and focus on the problem. 161 | > 162 | > AVA automatically removes unrelated lines in stack traces, allowing you to find the source of an error much faster, as seen above. 163 | > 164 | > AVA automatically detects whether your CI environment supports parallel builds. Each build will run a subset of all test files, while still making sure all tests get executed. See the ci-parallel-vars package for a list of supported CI environments. 165 | 166 | ### Jest 167 | 168 | ![Jest Logo](images/logo-jest.png) 169 | 170 | 171 | 172 | Jest is feature-packed, aiming to solve _everything_ in one package, with a focus on making the experience delightful for the test author. It's written and maintained by Facebook and is extremely popular and community supported due to the ubiquity of React and `create-react-app`. The CLI output is colorful and interactive with detailed exception reporting and diffing. Snapshot testing, mocking, and coverage reporting are all built-in. Also included are globals like `it` and `describe` as well as a custom assertion library (similar to `chai`). It also touts: 173 | 174 | > zero config - Jest aims to work out of the box, config free, on most JavaScript projects. 175 | > 176 | > isolated - Tests are parallelized by running them in their own processes to maximize performance. 177 | > 178 | > great api - From `it` to `expect` - Jest has the entire toolkit in one place. Well documented, well maintained, well good. 
179 | 180 | ### Mocha 181 | 182 | ![Mocha Logo](images/logo-mocha.png) 183 | 184 | 185 | 186 | Being the most established of the testing frameworks, Mocha enjoys a solid place in the JavaScript community. It's been around since 2011 and is maintained by the [OpenJS Foundation](https://openjsf.org/projects/) (growth stage) and contributors. Mocha supports numerous command-line options and configurations. It's generally used in tandem with external libraries - `assert` or `chai` could take care of your assertion needs and `sinon` could take care of your mocking needs. The `it` and `describe` blocks mentioned by Jest were pioneered by Mocha (along with the `beforeEach`, `afterEach`, and other pre/post hooks). In addition to being able to run in `node`, you can also run tests in the browser giving you full access to the DOM. There's also a dizzying array of test reporting styles (one being Nyan cat.) In its own words: 187 | 188 | > Mocha is a feature-rich JavaScript test framework running on Node.js and in the browser, making asynchronous testing simple and fun. Mocha tests run serially, allowing for flexible and accurate reporting, while mapping uncaught exceptions to the correct test cases. 189 | > 190 | > Mocha is the most-depended-upon module on npm (source: libraries.io) 191 | > 192 | > The [SuperAgent request library](https://visionmedia.github.io/superagent/) test documentation was generated with [Mocha's "doc" reporter](https://mochajs.org/#doc) 193 | 194 | _Update [8.0.0 / 2020-06-10](https://github.com/mochajs/mocha/blob/master/CHANGELOG.md#800--2020-06-10): Mocha 8 has built-in support for running tests in parallel!_ 195 | 196 | #### mocha-parallel-tests 197 | 198 | 199 | 200 | ⚠️ Important note: As of [Mocha 8x](https://github.com/mochajs/mocha/blob/master/CHANGELOG.md#800--2020-06-10), there's built-in support for running tests in parallel. In case you can't currently upgrade to mocha 8x, `mocha-parallel-tests` is a viable choice! 
201 | 202 | `mocha-parallel-tests` is not a testing framework. It's a wrapper over Mocha designed to significantly speed it up. It's new in 2019 and has a small team. I'll go into detail on why I'm including it here (and what "parallel" means) in the "speed" portion of this article. From the readme: 203 | 204 | > `mocha-parallel-tests` is a test runner for tests written with mocha testing framework which allows you to run them in parallel. 205 | > 206 | > `mocha-parallel-tests` executes each of your test files in a separate process while maintaining the output structure of mocha. 207 | > 208 | > Compared to the other tools which try to parallelize mocha tests execution, `mocha-parallel-tests` doesn't require you to write the code in a different way or use some specific APIs - just run your tests with `mocha-parallel-tests` instead of mocha and you will see the difference. Or if you prefer to use mocha programmatic API replace it with `mocha-parallel-tests` default export and you're done! 209 | > 210 | > If you're using Node.JS >= 12 your tests execution will be even faster because `mocha-parallel-tests` supports running tests with Node.JS worker threads API. 211 | 212 | ### Popularity and Community Comparison 213 | 214 | Now that we know a bit about each framework, lets look at some of their popularity, publish frequency, and other community metrics. 215 | 216 | ![chart-popularity-slide](images/chart-popularity-slide.png) 217 | ![chart-downloads-slide](images/chart-downloads-slide.png) 218 | 219 | > Charts made with 220 | 221 | Overall, we can see that _all_ the frameworks are rising in popularity. To me, this indicates that more people are writing JavaScript applications and testing them - which is quite exciting. The fact that none of them are on a downward trend makes all of them viable in this category. 
222 | 223 | | | Weekly Downloads \* | Last Publish | Publishes in 1 Year | Contributors | 224 | | -------------------- | ------------------- | ------------ | ------------------- | ------------ | 225 | | Jest | 7.2 million | 2020-05-05 | 27 | 1083 | 226 | | Mocha | 4.3 million | 2020-04-24 | 11 | 439 | 227 | | AVA | 227,179 | 2020-05-08 | 20 | 243 | 228 | | mocha-parallel-tests | 18,097 | 2020-02-08 | 4 | 14 | 229 | 230 | \* Weekly Downloads as of May 15, 2020 231 | 232 | 🥇Jest is clearly the most popular framework with 7.2 million weekly downloads. It was published most recently and is updated very frequently. Its popularity can be partially attributed to the popularity of the React library. Jest is shipped with `create-react-app` and is recommended for use in React's documentation. 233 | 234 | 🥈Mocha comes in second place with 4.3 million weekly downloads. It was the de facto standard long before Jest hit the scene and is the test runner of many, many applications. It isn't published as frequently as the other two which I believe is a testament to it being tried, true, and more stable. 235 | 236 | 🥉AVA has 227,179 weekly downloads, an order of magnitude fewer than the most popular frameworks. It is published frequently, which positively signals a focus on improvement and iteration. This may be due to its (arguably niche) focus on minimalism or it having a small team that doesn't have the resources to promote the library. 237 | 238 | `mocha-parallel-tests` has 18,097 weekly downloads and doesn't enjoy as frequent updates as the major three. It's extremely new and not a framework. 239 | 240 | In general, more popularity brings more community involvement. The number of open and closed issues tends to increase as a result. 
To create a loose maintenance ratio metric\*, we divide the open issues by the total number of issues (open + closed issues): 241 | 242 | | | Open Issues | Closed Issues | Total | Ratio | 243 | | -------------------- | ----------- | ------------- | ----- | ----- | 244 | | Mocha | 254 | 2225 | 2479 | 10.2% | 245 | | AVA | 154 | 1169 | 1323 | 11.6% | 246 | | Jest | 844 | 4343 | 5187 | 16.2% | 247 | | mocha-parallel-tests | 37 | 111 | 148 | 25.0% | 248 | 249 | 🥇Mocha has the lowest ratio of open to closed issues, making it the most successfully maintained library. Its stability surely correlates with its longevity (and vice versa.) 250 | 251 | 🥈AVA is 2nd place. This is quite impressive given its small team. 252 | 253 | 🥉Jest is 3rd place. This comes as no surprise given that it has the most issues to deal with. 254 | 255 | `mocha-parallel-tests` has the fewest number of total issues by far but the highest ratio. It doesn't have a significant financial backing like the other frameworks do. 256 | 257 | \* Caveat: I'm assuming the open issues in these libraries aren't crippling to the core functionality of the library. 258 | 259 | ### Speed Comparison 260 | 261 | Before we get to the comparison, I'd like to discuss a few concepts. All of the frameworks run the tests in "parallel", with the exception of Mocha, which runs its tests in "serial." 262 | 263 | #### What do "serial" and "parallel" mean? 264 | 265 | "Serial" - one at a time, ie - the first must complete before the second, the second must complete before the third, etc. Code _may_ run asynchronously but doesn't start a separate process. This type of processing is also known as sequential processing. 266 | 267 | "Parallel" - happening simultaneously, ie - the first, second, third, etc can happen at the same time. Multiple tasks are completed at a time by different processes (which may be different threads or literally different processors). 
268 | 269 | For all of the frameworks with parallel capabilities, only separate test files are run in parallel. `describe` and `it` blocks in a given file/suite are run serially. Given this, writing more test files and putting slow tests into their own files may increase the speed of running the complete test suite. 270 | 271 | #### Benchmarks 272 | 273 | To generate the speed metrics in the article, I created a node application that runs tests in all the frameworks listed above. [The documentation for it](https://github.com/scraggo/comparing-javascript-test-runners/blob/master/docs/test-runner.md) explains how I wrote and ran the tests. The aim was to simulate a "true" test run in a significantly sized enterprise codebase. Here are the results (`node` version 12): 274 | 275 | | | Speed | Type | 276 | | ---------------------- | ----- | -------- | 277 | | mocha 8.x `--parallel` | 5.3s | parallel | 278 | | mocha-parallel-tests | 7.3s | parallel | 279 | | AVA | 9.6s | parallel | 280 | | Jest | 12.5s | parallel | 281 | | Mocha | 16.2s | serial | 282 | 283 | A caveat with all benchmarking tests: the hardware environment (the make, model, RAM, processes running, etc) will affect measured results. For this reason, we'll only be considering the speeds relative to each other. 284 | 285 | 🥇Mocha 8.x with `--parallel` enabled is the fastest of the frameworks in this run (and most runs.) (Mocha versions prior to 8.x can't enable this option.) 286 | 287 | - [Mocha's docs on the new flag](https://mochajs.org/#-parallel-p) 288 | - [This is a great resource on getting up and running with this new flag.](https://developer.ibm.com/technologies/node-js/articles/parallel-tests-mocha-v8/) 289 | 290 | 🥈`mocha-parallel-tests` and AVA are close behind (AVA actually ran faster than `mocha-parallel-tests` in a few of the runs.) 291 | 292 | 🥉Jest is also fast, but seems to have a bit more overhead than the other two. 
293 | 294 | Mocha in serial mode lags far behind the parallel runners - which is to be expected because its tests must take "real" time to execute, one after the other. If speed is your most important criterion (and its drawbacks are not an issue), you'll see a 200-1000% increase in test speed using `mocha-parallel-tests` instead (depending on your machine, `node` version, and the tests themselves). 295 | 296 | ### Ease of Use Comparison 297 | 298 | I'll split "ease of use" into a few categories: 299 | 300 | - Amount of necessary configuration/dependencies 301 | - Writing the tests 302 | - Running the tests 303 | 304 | ### Amount of necessary configuration/dependencies 305 | 306 | | | Configuration | Dependencies | 307 | | ---------------------------- | -------------------------------------- | ------------------------------------------------------------------------------------ | 308 | | Jest | close-to-zero-config: lots of defaults | All dependencies included: snapshot testing, mocking, coverage reporting, assertions | 309 | | AVA | Sensible defaults | some externals necessary. Included: snapshot testing, assertions | 310 | | Mocha & mocha-parallel-tests | Many, many options | most externals necessary (all if in-browser) | 311 | 312 | 🥇Jest takes the cake in this department. Using its defaults wherever possible, you could have close to zero configuration. 313 | 314 | > Jest's configuration can be defined in the package.json file of your project, or through a jest.config.js file or through the --config option. 
If you'd like to use your package.json to store Jest's config, the "jest" key should be used on the top level so Jest will know how to find your settings 315 | 316 | - Sensible defaults for finding tests `(default: [ "**/__tests__/**/*.[jt]s?(x)", "**/?(*.)+(spec|test).[jt]s?(x)" ])` 317 | - A huge number of options 318 | - Built-in snapshot tests, coverage reporting, mocking modules and libraries 319 | - Excellent documentation, lots of tutorials and examples 320 | 321 | 🥈AVA comes in 2nd place. 322 | 323 | - Sensible defaults for finding tests (see Jest's defaults) 324 | - Configure in package.json, an ava.config.\* file, or another override file in the directory root 325 | - Many CLI options 326 | - Built-in snapshot tests 327 | - `@ava/babel` for Babel compilation 328 | - `@ava/typescript` for TypeScript support 329 | - Good documentation, few tutorials and examples 330 | - Coverage reporting, mocking modules and libraries must be imported from elsewhere 331 | 332 | 🥉Mocha comes in 3rd place. 333 | 334 | > By default, mocha looks for the glob `"./test/*.js"`, so you may want to put your tests in `test/` folder. If you want to include subdirectories, pass the `--recursive` option. 335 | 336 | - One default for finding tests (above) 337 | - Configure in package.json or an override file 338 | - Many, many CLI options 339 | - Good documentation (slightly opaque and a lot to read through), lots of tutorials and examples (in and out of Mocha's docs) 340 | - Assertions\*, coverage reporting, snapshot tests, mocking modules and libraries (everything) must be imported from elsewhere 341 | 342 | \* node's built-in `assert` is commonly used with Mocha for assertions. While it's not built into Mocha, it can be easily imported: `const assert = require('assert')`. If testing in-browser, you wouldn't have access to `assert` and would have to use a library like `chai`. 343 | 344 | For mocha-parallel-tests, run tests as you would with Mocha. 
There is a caveat: 345 | 346 | > Most of mocha CLI options are supported. If you're missing some of the options support you're welcome to submit a PR: all options are applied in a same simple way. 347 | 348 | #### Writing the tests 349 | 350 | | | Summary | 351 | | ---------------------------- | ----------------------------------- | 352 | | Mocha & mocha-parallel-tests | `describe` and `it` blocks | 353 | | Jest | like Mocha, but everything built in | 354 | | AVA | import test context, customizable | 355 | 356 | Mocha's influence on test-writing is undeniable. From [Mocha's getting started section](https://mochajs.org/#getting-started), we can see how tests are organized in nested `describe` blocks that can contain any number of `it` blocks which make test assertions. 357 | 358 | ```js 359 | const assert = require('assert'); // only works in node 360 | describe('Array', function() { 361 | describe('#indexOf()', function() { 362 | it('should return -1 when the value is not present', function() { 363 | assert.equal([1, 2, 3].indexOf(4), -1); 364 | }); 365 | }); 366 | }); 367 | ``` 368 | 369 | [Chai's `expect`](https://www.chaijs.com/) is commonly used instead of assert: 370 | 371 | ```js 372 | const { expect } = require('chai'); // works in both node and browser 373 | 374 | it('should return -1 when the value is not present', function() { 375 | expect([1, 2, 3].indexOf(4)).to.equal(-1); 376 | }); 377 | ``` 378 | 379 | > More Mocha test examples can be found here: 380 | 381 | Jest follows Mocha's example, but everything is included (no need to use external assertion, etc libraries). 
A lot of the syntax is compatible with Mocha, for example - `it` can be used instead of `test`: 382 | 383 | ```js 384 | describe('Array', function() { 385 | describe('#indexOf()', function() { 386 | test('should return -1 when the value is not present', function() { 387 | expect([1, 2, 3].indexOf(4)).toEqual(-1); 388 | }); 389 | }); 390 | }); 391 | ``` 392 | 393 | (Notice that Jest's `toEqual` is very much like chai's `to.equal`. Many of Jest's assertions are camel-cased analogues of chai's assertions.) 394 | 395 | AVA takes a different approach to writing tests. It prides itself on not injecting globals into your tests. Everything comes from `test` and the `t` "execution context" variable in the callback: 396 | 397 | ```js 398 | // Array-indexOf.spec.js 399 | const test = require('ava'); 400 | 401 | test('should return -1 when the value is not present', t => { 402 | t.is([1, 2, 3].indexOf(4), -1); 403 | }); 404 | ``` 405 | 406 | Built-in assertions are available on the `t` object: `true`, `false`, `truthy`, `falsy`, `is`, etc. You can also create custom assertions. 407 | 408 | Like Mocha and Jest, AVA has `before`, `beforeEach`, `after`, and `afterEach` hooks documented [here](https://github.com/avajs/ava/blob/master/docs/01-writing-tests.md#before--after-hooks). These work on a per-file basis. 409 | 410 | AVA does not have a way to nest test blocks (an ability `describe` affords in Mocha and Jest.) [This issue](https://github.com/avajs/ava/issues/222) goes into detail on why the maintainers haven't adopted this functionality. [This article](https://stackoverflow.com/questions/41269085/why-doesnt-js-testing-library-ava-have-suites-or-any-other-groupings) has some alternatives. 411 | 412 | An excellent [example of a single AVA test suite](https://github.com/sindresorhus/is/blob/master/test/test.ts) shows a wide range of AVA's capabilities. 
413 | 414 | Since the frameworks have drastically different styles and similar capabilities, this was difficult to rank from easiest to most difficult. AVA has a noticeable lack of organizing tests with nested `describe` blocks, but its contained API is extremely flexible. Mocha forces the user to make decisions on which libraries to use, but this makes it almost limitless in capability. Jest includes _everything_, but some of the built-in magic makes it difficult or confusing to get certain things done ( see [this issue on mocks](https://github.com/facebook/jest/issues/2567) ). 415 | 416 | #### Running the tests 417 | 418 | | | Summary | 419 | | ---------------------------- | ------------------------------ | 420 | | Jest | interactive CLI or GUI | 421 | | Mocha & mocha-parallel-tests | non-interactive CLI or browser | 422 | | AVA | non-interactive CLI | 423 | 424 | 🥇Jest has an incredible interactive command line interface. (Using [Majestic](https://github.com/Raathigesh/majestic/) adds a web-based GUI to the experience.) There are numerous options for choosing which tests run and updating snapshots - all keyboard-driven. It watches for test file changes in watch mode and _only runs the tests that have been updated_. There isn't as much of a need to use `.only` because filtering terms is a breeze. 425 | 426 | ![Jest CLI1](images/jest-cli1.png) 427 | ![Jest CLI2](images/jest-cli2.png) 428 | ![Jest CLI3](images/jest-cli3.png) 429 | 430 | Its text output is not only a pleasure to look at, it also includes all the information you could need: 431 | 432 | ```txt 433 | ... 434 | PASS test/file.spec.js 435 | ... 436 | 437 | Test Suites: 50 passed, 50 total 438 | Tests: 1250 passed, 1250 total 439 | Snapshots: 0 total 440 | Time: 10.753s 441 | Ran all test suites.
442 | ``` 443 | 444 | ![Jest Output](images/output-jest.png) 445 | 446 | _Above: Jest output on successful test run_ 447 | 448 | 🥈Mocha, being highly configurable, sometimes necessitates long and difficult-to-read commands to run test suites. Once this command is set, it may need to be altered to filter for files. I wind up making a lot of slightly varied package.json script commands to capture the variations I'm looking for. Once these are set, tests run smoothly and the output and diffs are informative and legible. (Also remember the wide range of output styles available.) 449 | 450 | ![Mocha Output](images/output-mocha.png) 451 | 452 | ![Mocha Parallel Tests Output](images/output-mpt.png) 453 | 454 | _Above: Mocha and mocha-parallel-tests output_ 455 | 456 | 🥉AVA is highly configurable, but there are a few things not included by default that I miss. One is seeing the output of all the tests. Enable `verbose` to do that. Another is seeing the time it takes to execute the complete test suite. Adding `time` (a `bash` function) before the command provides this, but it's not as immediately comprehensible - [see this issue](https://github.com/avajs/ava/pull/322). AVA's defaults are extremely minimal, so like Mocha, you may need a complex configuration to serve your needs. Filtering for tests is very similar to the other frameworks (there's a `--match, -m` command that can be repeated.) 457 | 458 | ![AVA Output](images/output-ava.png) 459 | 460 | _Above: AVA output. Refer to "real" for time taken to run all the tests_ 461 | 462 | ### Failure Reporting and Debugging Comparison 463 | 464 | All of the frameworks have pleasant to read output, detailed informative diffs for failures, and stack traces that show exactly where errors occurred. They all also offer the ability to debug with node or debugger of your choice. Mocha has an option to enable full stack traces, which may make it slightly more optimal for test failure troubleshooting.
465 | 466 | Mocha 467 | 468 | - [Debugging with node](https://mochajs.org/#-inspect-inspect-brk-inspect) 469 | - [Enable full stack traces](https://mochajs.org/#-full-trace) 470 | 471 | Jest 472 | 473 | - [Troubleshooting and debugging](https://jestjs.io/docs/en/troubleshooting) 474 | 475 | AVA 476 | 477 | - See `debug` 478 | 479 | ### Works with your framework and environment of choice (React, Redux, Electron, etc) Comparison 480 | 481 | 🥇Mocha works for everything. I've had success running React applications and end-to-end tests with Spectron (for Electron applications), amongst other things. It can also run in browser, which is extremely helpful for testing applications that use libraries which are tied directly to browser functionality. 482 | 483 | 🥈Jest is an all-purpose test-runner and is recommended for testing React applications. It doesn't have support for browser testing (as far as I can tell.) It supports [Puppeteer](https://jestjs.io/docs/en/puppeteer) for acceptance testing (and may support others). 484 | 485 | 🥉AVA is an all-purpose test-runner, though, it has [yet to support browser testing](https://github.com/avajs/ava/blob/master/docs/recipes/browser-testing.md). It supports [Puppeteer](https://github.com/avajs/ava/blob/master/docs/recipes/puppeteer.md) for acceptance testing (and may support others). 486 | 487 | ### Full Comparison (with "Nice to Haves") 488 | 489 | Let's recap our findings and fill in some gaps with our "nice to haves." (M8 = Mocha 8.x with `--parallel` enabled. MPT = `mocha-parallel-tests`) 490 | 491 | | Feature | Notes | 492 | | ------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 493 | | Popularity | Jest - 1st place. 
Mocha - 2nd place. AVA - 3rd place. MPT - 4th place. | 494 | | Maintenance | Mocha - 1st place. AVA - 2nd place. Jest - 3rd place. MPT - 4th place. | 495 | | Speed | M8/MPT - 1st place. AVA - 2nd place. Jest - 3rd place. Mocha - 4th place. | 496 | | Amount of necessary configuration/dependencies | Jest - 1st place. AVA - 2nd place. Mocha - 3rd place. MPT - 4th place. | 497 | | Organization strategies: `describe` and `it` blocks | Mocha and Jest use these, AVA does not (omits all test globals) | 498 | | Running the tests | Jest - 1st place. Mocha - 2nd place. MPT - 3rd place. AVA - 4th place. | 499 | | Failure Reporting and Debugging | Mocha slightly above others | 500 | | Works with your framework and environment of choice | Mocha wins for supporting browser testing | 501 | | "watch" and "inspect" modes | Included in all the frameworks. | 502 | | A variety of assertion capabilities | All the frameworks have these as well as the ability to add custom matchers | 503 | | Ability to add tools like coverage (nyc), snapshot testing, etc | Mocha and AVA allow you to add what you need, Jest has these built-in | 504 | | Mocking / Injecting modules (intercepting require statements) | Jest has its own module/library mocking system. Mocha and AVA require external libraries (examples: [sinon](https://sinonjs.org/), [proxyquire](https://github.com/thlorenz/proxyquire), [inject-loader](https://github.com/plasticine/inject-loader)) | 505 | | Webpack compilation (injecting of webpack-defined global variables) | For Mocha, [`mochapack`](https://sysgears.github.io/mochapack/) is a library that allows this. [Jest's docs on webpack](https://jestjs.io/docs/en/webpack.html). There's no AVA documentation for webpack specifically, see below on Babel. 
| 506 | | Babel transpilation of ESNext code and use of alias module import statements (removing the need for `../../`) | All the frameworks allow for babel configuration | 507 | 508 | ## Recommendations 509 | 510 | As you can see, all the frameworks are incredibly robust for most testing needs. However, if you picked one at random, it might not work for a specific use case. It's not an easy choice, but here's how I'd break it down: 511 | 512 | - 🏅Mocha is recommended if you want your tests to run in any environment. It's incredibly community-supported and is extend-able with your favorite 3rd-party packages. Using `mocha-parallel-tests` (or 8.x with `--parallel` enabled) would give you a speed advantage. 513 | - 🏅Jest is recommended if you want to get tests up and running quickly. It has everything built in and requires very little configuration. The command line and GUI experience is unmatched. Finally, it's the most popular and makes an excellent pair with React. 514 | - 🏅AVA is recommended if you want a minimalist framework with no globals. AVA is fast, easy to configure, and you get ES-Next transpilation out of the box. You don't want hierarchical `describe` blocks and you want to support a smaller project. 515 | 516 | ## Conclusion 517 | 518 | I hope this article was helpful - whether it be in furthering your own investigation, giving you a place to start with learning, or helping your team choose the right test runner for your applications. Don't hesitate to [contact me](https://www.scraggo.com/) if you have questions or feedback. 519 | 520 | ### Want to contribute? 521 | 522 | Found a typo? Want to add details or make a correction? This repo is open-source and your contributions are 100% welcome 💥. 
523 | 524 | ### External Resources 525 | 526 | Articles: 527 | 528 | - [Node.js & JavaScript Testing Best Practices (2020) - Medium](https://medium.com/@me_37286/yoni-goldberg-javascript-nodejs-testing-best-practices-2b98924c9347) 529 | - [An Overview of JavaScript Testing in 2020 - Medium](https://medium.com/welldone-software/an-overview-of-javascript-testing-7ce7298b9870) 530 | - 531 | - 532 | - 533 | - 534 | 535 | More on Mocha: 536 | 537 | - 538 | - [The Ultimate Unit Testing Cheat-sheet For Mocha, Chai and Sinon](https://gist.github.com/yoavniran/1e3b0162e1545055429e) 539 | - [Mocha's docs on the new --parallel flag](https://mochajs.org/#-parallel-p) 540 | - [This is a great resource on getting up and running with --parallel](https://developer.ibm.com/technologies/node-js/articles/parallel-tests-mocha-v8/) 541 | 542 | More on Jest: 543 | 544 | - 545 | - 546 | - [Migrating from Mocha to Jest - Airbnb Engineering & Data Science - Medium](https://medium.com/airbnb-engineering/unlocking-test-performance-migrating-from-mocha-to-jest-2796c508ec50) 547 | 548 | More on AVA: 549 | 550 | - 551 | - 552 | - [AVA, low-config testing for JavaScript - hello JS](https://blog.hellojs.org/ava-low-config-testing-for-javascript-71bd2d958745) 553 | - [Ava Test Runner - A Fresh Take On JavaScript Testing and Growing an Open-Source Project - Sessions by Pusher](https://pusher.com/sessions/meetup/the-js-roundabout/ava-test-runner-a-fresh-take-on-javascript-testing-and-growing-an-open-source-project) 554 | 555 | Packages: 556 | 557 | | Package URL | Category | Works With | Notes | 558 | | ------------------------------------------------------------------- | ------------ | ---------- | ---------------------------------------------------------------------------------------------------------------- | 559 | | [nyc - istanbul](https://istanbul.js.org/) | coverage | AVA, Mocha | | 560 | | [sinon](https://sinonjs.org/) | mocking | AVA, Mocha | | 561 | | 
[proxyquire](https://github.com/thlorenz/proxyquire) | mock-require | AVA, Mocha | | 562 | | [inject-loader](https://github.com/plasticine/inject-loader) | mock-require | AVA, Mocha | | 563 | | [chai](https://www.chaijs.com/) | assertions | Mocha | see [chai plugins](https://www.chaijs.com/plugins/) including `chai-immutable`, `chai-as-promised`, `chai-sinon` | 564 | | [mocha-snapshots](https://github.com/wellguimaraes/mocha-snapshots) | snapshot | Mocha | | 565 | | [mochapack](https://sysgears.github.io/mochapack/) | webpack | Mocha | | 566 | | [Majestic](https://github.com/Raathigesh/majestic/) | GUI | Jest | | 567 | -------------------------------------------------------------------------------- /docs/templates.md: -------------------------------------------------------------------------------- 1 | # Templates 2 | 3 | These templates shouldn't be tested on their own. They're used to generate multiple tests in their respective directories, `ava`, `jest`, and `mocha`. 4 | 5 | Defining `require` paths to files in the templates should be relative to `/test/` not necessarily relative to the templates directory. 6 | -------------------------------------------------------------------------------- /docs/test-runner.md: -------------------------------------------------------------------------------- 1 | # Comparing JavaScript Test Runners 2 | 3 | ![comparing-javascript-test-runners.png](../images/comparing-javascript-test-runners.png) 4 | 5 | JavaScript test runners for comparison: 6 | 7 | - AVA 8 | - Jest 9 | - Mocha (and an interesting wrapper called `mocha-parallel-tests`) 10 | 11 | This repo contains two things: 12 | 13 | 1. A node application that has the capability of running tests in all these frameworks. [Documentation Permalink](https://github.com/scraggo/comparing-javascript-test-runners/blob/master/docs/test-runner.md) 14 | 2. 
[A companion article](https://github.com/scraggo/comparing-javascript-test-runners/blob/master/README.md) that does an in-depth comparison of these frameworks and gives context about the motivation behind this application. [Permalink](https://github.com/scraggo/comparing-javascript-test-runners/blob/master/README.md) 15 | 16 | ## About the application 17 | 18 | This application is a test-runner that can: 19 | 20 | - create the same tests that are compatible with the testing frameworks above 21 | - run those tests with a comparison of the times it takes to execute them 22 | 23 | My goal was to create something similar to the [TodoMVC project](http://todomvc.com/) which compared the same "todo" app with different frameworks - React, Backbone, Ember, Vanilla, etc. For my test runner - I generate the same tests but with syntax that’s compatible with the test runners, capture the times it took to run, and output a report at the end. 24 | 25 | The number and length of the authored tests simulate a "true" test run in a significantly sized enterprise codebase. Each test runner has a template that will run the _same exact_ test blocks and take the _same exact_ amount of time in each block. (This is done with a `setTimeout` with a time that increases with each iteration of the loop that generates the test block.) 26 | 27 | To account for a bias in ordering, the scripts corresponding to each test runner are shuffled. This ensures that the suites for each test runner are never called in the same sequence. 28 | 29 | ![speed-test-results.png](../images/speed-test-results.png) 30 | 31 | ## Running the tests 32 | 33 | `npm install` to install all the packages. 34 | 35 | `npm run clean` (optional) to clear out all the generated test files. 36 | 37 | `npm run make-tests` to generate test files. 38 | 39 | `npm run test-all` to run **all** the generated tests and see a diagnostic output. 
40 | 41 | `npm run test-ava` to run the generated `ava` tests 42 | 43 | `npm run test-jest` to run the generated `jest` tests 44 | 45 | `npm run test-mocha` to run the generated `mocha` tests 46 | 47 | `npm run test-parallel` to run the generated `mocha-parallel-tests` tests 48 | 49 | ## Development 50 | 51 | `npm run lint` to lint files. 52 | 53 | `npm run test` to run the internal codebase tests. 54 | 55 | ## Want to contribute? 56 | 57 | Found a typo? Want to add details or make a correction? This repo is open-source and your contributions are 100% welcome 💥. 58 | -------------------------------------------------------------------------------- /images/chart-downloads-slide.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/chart-downloads-slide.png -------------------------------------------------------------------------------- /images/chart-popularity-slide.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/chart-popularity-slide.png -------------------------------------------------------------------------------- /images/comparing-javascript-test-runners.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/comparing-javascript-test-runners.png -------------------------------------------------------------------------------- /images/jest-cli1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/jest-cli1.png 
-------------------------------------------------------------------------------- /images/jest-cli2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/jest-cli2.png -------------------------------------------------------------------------------- /images/jest-cli3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/jest-cli3.png -------------------------------------------------------------------------------- /images/logo-ava.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/logo-ava.png -------------------------------------------------------------------------------- /images/logo-jest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/logo-jest.png -------------------------------------------------------------------------------- /images/logo-mocha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/logo-mocha.png -------------------------------------------------------------------------------- /images/output-ava.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/output-ava.png 
-------------------------------------------------------------------------------- /images/output-jest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/output-jest.png -------------------------------------------------------------------------------- /images/output-mocha.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/output-mocha.png -------------------------------------------------------------------------------- /images/output-mpt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/output-mpt.png -------------------------------------------------------------------------------- /images/speed-test-results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/images/speed-test-results.png -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | // For a detailed explanation regarding each configuration property, visit: 2 | // https://jestjs.io/docs/en/configuration.html 3 | 4 | module.exports = { 5 | // All imported modules in your tests should be mocked automatically 6 | // automock: false, 7 | 8 | // Stop running tests after `n` failures 9 | // bail: 0, 10 | 11 | // Respect "browser" field in package.json when resolving modules 12 | // browser: false, 13 | 14 | // The directory where Jest should store its cached dependency 
information 15 | // cacheDirectory: "/private/var/folders/02/38pbtfzd4452bfxq7fcg9hdh08d2sg/T/jest_58zlb", 16 | 17 | // Automatically clear mock calls and instances between every test 18 | // clearMocks: false, 19 | 20 | // Indicates whether the coverage information should be collected while executing the test 21 | // collectCoverage: false, 22 | 23 | // An array of glob patterns indicating a set of files for which coverage information should be collected 24 | // collectCoverageFrom: undefined, 25 | 26 | // The directory where Jest should output its coverage files 27 | // coverageDirectory: undefined, 28 | 29 | // An array of regexp pattern strings used to skip coverage collection 30 | // coveragePathIgnorePatterns: [ 31 | // "/node_modules/" 32 | // ], 33 | 34 | // A list of reporter names that Jest uses when writing coverage reports 35 | // coverageReporters: [ 36 | // "json", 37 | // "text", 38 | // "lcov", 39 | // "clover" 40 | // ], 41 | 42 | // An object that configures minimum threshold enforcement for coverage results 43 | // coverageThreshold: undefined, 44 | 45 | // A path to a custom dependency extractor 46 | // dependencyExtractor: undefined, 47 | 48 | // Make calling deprecated APIs throw helpful error messages 49 | // errorOnDeprecated: false, 50 | 51 | // Force coverage collection from ignored files using an array of glob patterns 52 | // forceCoverageMatch: [], 53 | 54 | // A path to a module which exports an async function that is triggered once before all test suites 55 | // globalSetup: undefined, 56 | 57 | // A path to a module which exports an async function that is triggered once after all test suites 58 | // globalTeardown: undefined, 59 | 60 | // A set of global variables that need to be available in all test environments 61 | // globals: {}, 62 | 63 | // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. 
maxWorkers: 2 will use a maximum of 2 workers. 64 | // maxWorkers: "50%", 65 | 66 | // An array of directory names to be searched recursively up from the requiring module's location 67 | // moduleDirectories: [ 68 | // "node_modules" 69 | // ], 70 | 71 | // An array of file extensions your modules use 72 | // moduleFileExtensions: [ 73 | // "js", 74 | // "json", 75 | // "jsx", 76 | // "ts", 77 | // "tsx", 78 | // "node" 79 | // ], 80 | 81 | // A map from regular expressions to module names that allow to stub out resources with a single module 82 | // moduleNameMapper: {}, 83 | 84 | // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader 85 | // modulePathIgnorePatterns: [], 86 | 87 | // Activates notifications for test results 88 | // notify: false, 89 | 90 | // An enum that specifies notification mode. Requires { notify: true } 91 | // notifyMode: "failure-change", 92 | 93 | // A preset that is used as a base for Jest's configuration 94 | // preset: undefined, 95 | 96 | // Run tests from one or more projects 97 | // projects: undefined, 98 | 99 | // Use this configuration option to add custom reporters to Jest 100 | // reporters: undefined, 101 | 102 | // Automatically reset mock state between every test 103 | // resetMocks: false, 104 | 105 | // Reset the module registry before running each individual test 106 | // resetModules: false, 107 | 108 | // A path to a custom resolver 109 | // resolver: undefined, 110 | 111 | // Automatically restore mock state between every test 112 | // restoreMocks: false, 113 | 114 | // The root directory that Jest should scan for tests and modules within 115 | // rootDir: undefined, 116 | 117 | // A list of paths to directories that Jest should use to search for files in 118 | // roots: [ 119 | // "" 120 | // ], 121 | 122 | // Allows you to use a custom runner instead of Jest's default test runner 123 | // runner: "jest-runner", 124 | 125 | // The paths to modules 
that run some code to configure or set up the testing environment before each test 126 | // setupFiles: [], 127 | 128 | // A list of paths to modules that run some code to configure or set up the testing framework before each test 129 | // setupFilesAfterEnv: [], 130 | 131 | // A list of paths to snapshot serializer modules Jest should use for snapshot testing 132 | // snapshotSerializers: [], 133 | 134 | // The test environment that will be used for testing 135 | testEnvironment: 'node', 136 | 137 | // Options that will be passed to the testEnvironment 138 | // testEnvironmentOptions: {}, 139 | 140 | // Adds a location field to test results 141 | // testLocationInResults: false, 142 | 143 | // The glob patterns Jest uses to detect test files 144 | testMatch: [ 145 | // "**/__tests__/**/*.[jt]s?(x)", 146 | // "**/?(*.)+(spec|test).[tj]s?(x)" 147 | '/test/jest/*.js', 148 | ], 149 | 150 | // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped 151 | // testPathIgnorePatterns: [ 152 | // "/node_modules/" 153 | // ], 154 | 155 | // The regexp pattern or array of patterns that Jest uses to detect test files 156 | // testRegex: [], 157 | 158 | // This option allows the use of a custom results processor 159 | // testResultsProcessor: undefined, 160 | 161 | // This option allows use of a custom test runner 162 | // testRunner: "jasmine2", 163 | 164 | // This option sets the URL for the jsdom environment. 
It is reflected in properties such as location.href 165 | // testURL: "http://localhost", 166 | 167 | // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" 168 | // timers: "real", 169 | 170 | // A map from regular expressions to paths to transformers 171 | // transform: undefined, 172 | 173 | // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation 174 | // transformIgnorePatterns: [ 175 | // "/node_modules/" 176 | // ], 177 | 178 | // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them 179 | // unmockedModulePathPatterns: undefined, 180 | 181 | // Indicates whether each individual test should be reported during the run 182 | // verbose: undefined, 183 | 184 | // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode 185 | // watchPathIgnorePatterns: [], 186 | 187 | // Whether to use watchman for file crawling 188 | // watchman: true, 189 | }; 190 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "comparing-javascript-test-runners", 3 | "version": "1.0.0", 4 | "description": "A comparison of JavaScript test runners", 5 | "main": "index.js", 6 | "author": "scraggo@gmail.com", 7 | "license": "ISC", 8 | "scripts": { 9 | "clean": "rm ./test/{ava,jest,mocha}/*fullCircle* || echo 'No tests to remove.'", 10 | "lint": "eslint . 
--ext .js", 11 | "make-tests": "node ./src/generateTests", 12 | "test": "nyc mocha \"test/src/*.spec.js\" --require testSetup.js --parallel", 13 | "test-all": "node src/runAllTests", 14 | "test-ava": "ava \"test/ava/*.spec.js\"", 15 | "test-ava-time": "time ava \"test/ava/*.spec.js\"", 16 | "test-jest": "jest", 17 | "test-mocha": "mocha \"test/mocha/*.spec.js\"", 18 | "test-parallel": "mocha \"test/mocha/*.spec.js\" --parallel" 19 | }, 20 | "dependencies": { 21 | "ava": "^3.1.0", 22 | "chai": "^4.2.0", 23 | "execa": "^4.0.0", 24 | "jest": "^25.1.0", 25 | "mocha": "^8.0.1", 26 | "nyc": "^15.0.0" 27 | }, 28 | "devDependencies": { 29 | "eslint": "^6.1.0", 30 | "eslint-config-airbnb-base": "^14.0.0", 31 | "eslint-config-prettier": "^6.10.0", 32 | "eslint-plugin-import": "^2.20.0", 33 | "eslint-plugin-prettier": "^3.1.2", 34 | "prettier": "1.19.1", 35 | "sinon": "^9.0.2" 36 | }, 37 | "ava": { 38 | "verbose": true 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/fullCircle.js: -------------------------------------------------------------------------------- 1 | /** 2 | * After a period of self-reflection (wait), we return our former self. 
3 | * @param {any} self 4 | * @param {number} wait time in ms 5 | * @returns {any} self 6 | */ 7 | const fullCircle = (self, wait) => { 8 | const timeDone = Date.now() + wait; 9 | 10 | while (Date.now() < timeDone) { 11 | // do nothing 12 | } 13 | 14 | return self; 15 | }; 16 | 17 | module.exports = { fullCircle, main: fullCircle }; 18 | -------------------------------------------------------------------------------- /src/generateTests/config.js: -------------------------------------------------------------------------------- 1 | const AVA = 'ava'; 2 | const JEST = 'jest'; 3 | const MOCHA = 'mocha'; 4 | const TEST_RUNNERS = [AVA, JEST, MOCHA]; 5 | 6 | // relative to root 7 | const TEST_DIR = './test'; 8 | const TEMPLATE_DIR = './test/templates'; 9 | 10 | const getTemplatePath = testRunner => `${TEMPLATE_DIR}/${testRunner}.js`; 11 | const getDestPath = testRunner => `${TEST_DIR}/${testRunner}`; 12 | 13 | /** 14 | * { 15 | * ava: { 16 | * dest: 'path', 17 | * template: 'path' 18 | * }, 19 | * ...etc 20 | * } 21 | */ 22 | const TEMPLATE_PATHS = TEST_RUNNERS.reduce((acc, testRunner) => { 23 | acc[testRunner] = { 24 | dest: getDestPath(testRunner), 25 | template: getTemplatePath(testRunner), 26 | }; 27 | return acc; 28 | }, {}); 29 | 30 | module.exports = { 31 | TEMPLATE_PATHS, 32 | TEST_RUNNERS, 33 | }; 34 | -------------------------------------------------------------------------------- /src/generateTests/generateTestFiles.js: -------------------------------------------------------------------------------- 1 | const { promises: fsPromises } = require('fs'); 2 | 3 | const { range } = require('../utils'); 4 | const { TEMPLATE_PATHS, TEST_RUNNERS } = require('./config'); 5 | 6 | const createTestFilename = (num, testDir, testRunner) => 7 | `${testDir}/${testRunner}-fullCircle${num}.spec.js`; 8 | 9 | // get testTemplate file as a string 10 | const getTemplateFile = async templateFilePath => 11 | fsPromises.readFile(templateFilePath, { 12 | encoding: 'utf8', 13 | }); 14 | 
15 | // write template files to output test directory 16 | const makeALotOfTestFiles = (template, testDir, testRunner) => 17 | Promise.resolve( 18 | range(0, 50).map(num => 19 | fsPromises.writeFile( 20 | createTestFilename(num, testDir, testRunner), 21 | template 22 | ) 23 | ) 24 | ); 25 | 26 | const generateTestFiles = () => { 27 | try { 28 | TEST_RUNNERS.forEach(async testRunner => { 29 | const { dest, template } = TEMPLATE_PATHS[testRunner]; 30 | const testFile = await getTemplateFile(template); 31 | await makeALotOfTestFiles(testFile, dest, testRunner); 32 | }); 33 | } catch (err) { 34 | console.error(err); 35 | } 36 | }; 37 | 38 | module.exports = { 39 | generateTestFiles, 40 | }; 41 | -------------------------------------------------------------------------------- /src/generateTests/index.js: -------------------------------------------------------------------------------- 1 | const { generateTestFiles } = require('./generateTestFiles'); 2 | 3 | generateTestFiles(); 4 | -------------------------------------------------------------------------------- /src/runAllTests.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa'); 2 | 3 | const { getDateTime, shuffle } = require('./utils'); 4 | 5 | // CONSTANTS 6 | 7 | const TEST_RUNNERS = ['ava', 'jest', 'mocha', 'parallel']; 8 | const DOTS = '. 
'.repeat(16); 9 | const testRunners = shuffle(TEST_RUNNERS); 10 | 11 | // just run one for now 12 | // const testRunners = shuffle(TEST_RUNNERS).slice(0, 1); 13 | 14 | // UTILS 15 | 16 | /** 17 | * @param {string[]} argsArray - the 'command', ex ['run', 'start'] 18 | * @param {object} options 19 | * @param {boolean} options.log output to console 20 | * @returns {function} function that runs npm script 21 | */ 22 | const makeNPMScript = (argsArray, options = {}) => () => { 23 | const { log } = options; 24 | let execaOptions = {}; 25 | if (log === true) { 26 | execaOptions = { 27 | all: true, 28 | // these are necessary in order to keep console colors and formatting 29 | stdio: 'inherit', 30 | sterr: 'inherit', 31 | }; 32 | } 33 | execa.sync('npm', argsArray, execaOptions); 34 | }; 35 | 36 | /** 37 | * @param {string[]} argsArray - the 'command', ex ['run', 'start'] 38 | * @returns {string} 39 | */ 40 | const formatNPMName = argsArray => `npm ${argsArray.join(' ')}`; 41 | 42 | const formatExecutionTime = (executionTime, message = '()') => { 43 | return `"${message}" took ${executionTime / 1000}s to execute.`; 44 | }; 45 | 46 | /** 47 | * @param {object} resultObj ex: {"command":["run","test-ava"],"executionTime":7989,"name":"ava"} 48 | * @returns {string} 49 | */ 50 | const formatResult = resultObj => { 51 | const { command, executionTime, name } = resultObj; 52 | const message = formatNPMName(command); 53 | const title = `${name.toUpperCase()}`; 54 | return [title, formatExecutionTime(executionTime, message)].join(' '); 55 | }; 56 | 57 | /** 58 | * Runs script 59 | * @param {object} scriptObj 60 | * @returns {number} execution time in ms 61 | */ 62 | const runScript = scriptObj => { 63 | const start = Date.now(); 64 | const { script } = scriptObj; 65 | script(); 66 | return Date.now() - start; 67 | }; 68 | 69 | const logTestTitle = name => { 70 | console.log('\n\n', DOTS, '\n running tests for', name, '\n', DOTS, '\n\n'); 71 | }; 72 | 73 | const logResultsHeader = 
() => { 74 | console.log(`\n${'-*'.repeat(20)}`); 75 | console.log(`RESULTS ${getDateTime()}`); 76 | console.log('order:', testRunners); 77 | console.log(''); 78 | }; 79 | 80 | // sort by execution time (ascending; note Array#sort mutates resultsArr in place), format, and log test results 81 | const logResults = resultsArr => { 82 | resultsArr 83 | .sort((a, b) => a.executionTime - b.executionTime) 84 | .map(formatResult) 85 | .forEach(result => { 86 | console.log(result); 87 | }); 88 | }; 89 | 90 | // MAIN 91 | 92 | const testData = testRunners.reduce((acc, name) => { // build per-runner records: npm command, timing placeholder, and a run() thunk 93 | const command = ['run', `test-${name}`]; 94 | acc[name] = { 95 | command, 96 | executionTime: -1, // placeholder; overwritten by main() with the measured duration 97 | name, 98 | run: () => 99 | runScript({ 100 | script: makeNPMScript(command, { log: true }), 101 | }), 102 | }; 103 | return acc; 104 | }, {}); 105 | 106 | const main = () => { 107 | try { 108 | const testResults = []; 109 | 110 | // eslint-disable-next-line no-restricted-syntax 111 | for (const name of testRunners) { 112 | const { run } = testData[name]; 113 | 114 | logTestTitle(name); 115 | 116 | testData[name].executionTime = run(); 117 | testResults.push(testData[name]); 118 | } 119 | 120 | logResultsHeader(); 121 | logResults(testResults); 122 | } catch (error) { 123 | console.error(error); 124 | } 125 | }; 126 | 127 | main(); 128 | -------------------------------------------------------------------------------- /src/utils/date.js: -------------------------------------------------------------------------------- 1 | const options = { 2 | weekday: 'long', 3 | year: 'numeric', 4 | month: 'long', 5 | day: 'numeric', 6 | }; 7 | 8 | const getDateTime = (inputDate = new Date()) => { 9 | const date = inputDate.toLocaleDateString(undefined, options); 10 | const time = `${inputDate.getHours()}:${inputDate.getMinutes()}:${inputDate.getSeconds()}`; // note: no zero-padding (midnight renders as "0:0:0") — test/src/date.spec.js asserts this exact format 11 | return `${date} ${time}`; 12 | }; 13 | 14 | module.exports = { 15 | getDateTime, 16 | }; 17 | -------------------------------------------------------------------------------- /src/utils/index.js: 
-------------------------------------------------------------------------------- 1 | const dateUtils = require('./date'); 2 | const { range } = require('./range'); 3 | const { shuffle } = require('./shuffle'); 4 | 5 | module.exports = { 6 | ...dateUtils, 7 | range, 8 | shuffle, 9 | }; 10 | -------------------------------------------------------------------------------- /src/utils/range.js: -------------------------------------------------------------------------------- 1 | const isNumber = x => typeof x === 'number'; 2 | 3 | /** 4 | * @param {number} start inclusive 5 | * @param {number} end exclusive 6 | * @returns {number[]} array of numbers in the half-open interval [start, end) 7 | */ 8 | const range = (start, end = 0) => { 9 | if ( 10 | !isNumber(start) || 11 | !isNumber(end) || 12 | start < 0 || 13 | end < 0 || 14 | end < start 15 | ) { 16 | throw new Error('Invalid start and/or end'); 17 | } 18 | 19 | const arr = []; 20 | 21 | if (start === 0 && end === 0) { 22 | return arr; 23 | } 24 | 25 | for (let i = start; i < end; i += 1) { 26 | arr.push(i); 27 | } 28 | 29 | return arr; 30 | }; 31 | 32 | module.exports = { range }; 33 | -------------------------------------------------------------------------------- /src/utils/shuffle.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Fisher Yates Shuffle 3 | * @param {Array} arrInput 4 | * @param {Object} [options={}] 5 | * @param {boolean} [options.clone=false] when true, shuffle a copy; when false, arrInput is shuffled in place 6 | * @returns {Array} shuffled array (the copy, or arrInput itself when clone is false) 7 | */ 8 | function shuffle(arrInput, { clone = false } = {}) { 9 | const arr = clone ? 
arrInput.slice() : arrInput; 10 | 11 | for (let i = arr.length - 1; i > 0; i -= 1) { 12 | const j = Math.floor(Math.random() * (i + 1)); // j drawn uniformly from [0, i] 13 | [arr[i], arr[j]] = [arr[j], arr[i]]; 14 | } 15 | 16 | return arr; 17 | } 18 | 19 | module.exports = { shuffle }; 20 | -------------------------------------------------------------------------------- /test/ava/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/test/ava/.gitkeep -------------------------------------------------------------------------------- /test/jest/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/test/jest/.gitkeep -------------------------------------------------------------------------------- /test/mocha/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scraggo/comparing-javascript-test-runners/e5cf9c65bb24813546d26c6e6dffea1ee6d3eedc/test/mocha/.gitkeep -------------------------------------------------------------------------------- /test/src/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "rules": { 3 | "func-names": 0 4 | }, 5 | "globals": { 6 | "expect": true 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /test/src/date.spec.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | const sinon = require('sinon'); 3 | 4 | const { getDateTime } = require('../../src/utils'); 5 | 6 | const randomDate = new Date('Jan 1 2000'); // NOTE(review): non-ISO date-string parsing is implementation-defined — new Date(2000, 0, 1) would be safer 7 | const convertedDate = 'Saturday, January 1, 2000 0:0:0'; // getDateTime does not zero-pad hours/minutes/seconds 8 | 9 | describe('date utils', function() { 10 | 
describe('getDateTime', function() { 11 | it('gets a specific date time', function() { 12 | const res = getDateTime(randomDate); 13 | expect(res).to.equal(convertedDate); 14 | }); 15 | 16 | it('gets Date.now as default', function() { 17 | const stub = sinon.stub(global, 'Date').returns(randomDate); // new Date() now yields randomDate, making the default-argument path deterministic 18 | const res = getDateTime(); 19 | expect(res).to.equal(convertedDate); 20 | stub.restore(); 21 | }); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/src/range.spec.js: -------------------------------------------------------------------------------- 1 | const { range } = require('../../src/utils'); 2 | 3 | describe('range', function() { 4 | it('returns range array length 1', function() { 5 | expect(range(0, 1)).to.deep.equal([0]); 6 | }); 7 | it('returns range array length 2', function() { 8 | expect(range(0, 2)).to.deep.equal([0, 1]); 9 | }); 10 | it('returns range array with different start, length 2', function() { 11 | expect(range(2, 4)).to.deep.equal([2, 3]); 12 | }); 13 | it('returns empty array if end is undefined', function() { 14 | const res = range(0); 15 | expect(res).to.deep.equal([]); 16 | }); 17 | it('throws error under bad conditions', function() { 18 | expect(() => range()).to.throw(); 19 | expect(() => range(-1)).to.throw(); 20 | expect(() => range(-2, -1)).to.throw(); 21 | expect(() => range(2, 1)).to.throw(); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/src/shuffle.spec.js: -------------------------------------------------------------------------------- 1 | const { range, shuffle } = require('../../src/utils'); 2 | 3 | describe('shuffle', function() { 4 | it('returns array of same length', function() { 5 | expect(shuffle([0, 1])).to.have.length(2); 6 | expect(shuffle([0, 1, 2])).to.have.length(3); 7 | expect(shuffle([0, 1, 2, 3])).to.have.length(4); 8 | }); 9 | 10 | it('eventually shuffles cloned array', function() { 11 | 
const arr = [0, 1]; 12 | const NUM_OF_RUNS = 5; 13 | const runs = range(0, NUM_OF_RUNS).map(() => shuffle(arr, { clone: true })); 14 | 15 | expect(runs).to.deep.include([1, 0]); 16 | }); 17 | }); 18 | -------------------------------------------------------------------------------- /test/templates/ava.js: -------------------------------------------------------------------------------- 1 | const test = require('ava'); 2 | 3 | const { fullCircle } = require('../../src/fullCircle'); 4 | const { range } = require('../../src/utils'); 5 | 6 | test('ava fullCircle', t => { 7 | range(0, 25).forEach(num => { 8 | const res = fullCircle('hi', num); 9 | t.is(res, 'hi'); 10 | }); 11 | }); 12 | -------------------------------------------------------------------------------- /test/templates/jest.js: -------------------------------------------------------------------------------- 1 | const { fullCircle } = require('../../src/fullCircle'); 2 | const { range } = require('../../src/utils'); 3 | 4 | describe('jest fullCircle', function() { 5 | range(0, 25).forEach(num => { 6 | it(`returns self in ${num} milliseconds`, function() { 7 | const res = fullCircle('hi', num); 8 | expect(res).toEqual('hi'); 9 | }); 10 | }); 11 | }); 12 | -------------------------------------------------------------------------------- /test/templates/mocha.js: -------------------------------------------------------------------------------- 1 | const { expect } = require('chai'); 2 | 3 | const { fullCircle } = require('../../src/fullCircle'); 4 | const { range } = require('../../src/utils'); 5 | 6 | describe('mocha fullCircle', function() { 7 | range(0, 25).forEach(num => { 8 | it(`returns self in ${num} milliseconds`, function() { 9 | const res = fullCircle('hi', num); 10 | expect(res).to.equal('hi'); 11 | }); 12 | }); 13 | }); 14 | -------------------------------------------------------------------------------- /testSetup.js: -------------------------------------------------------------------------------- 1 | 
global.expect = require('chai').expect; // expose chai's expect as a global so the test/src specs can use it without importing (matches the "globals" entry in test/src/.eslintrc.json) 2 | --------------------------------------------------------------------------------