├── .github └── workflows │ ├── ci.yml │ └── publish.yml ├── .gitignore ├── .gitpod.yml ├── .vscode └── settings.json ├── CODE_OF_CONDUCT.md ├── LICENSE ├── PULL_REQUEST_TEMPLATE.md ├── README.md ├── _config.yml ├── docs ├── common │ └── images │ │ └── example_report.png ├── junit4.md └── junit5.md ├── junit4-examples ├── README.md ├── pom.xml └── src │ └── test │ └── java │ └── com │ └── github │ └── noconnor │ └── junitperf │ └── examples │ ├── ExampleAsyncTests.java │ ├── ExampleCommonReporter.java │ ├── ExampleConsoleReporter.java │ ├── ExampleFailureTests.java │ ├── ExampleSuccessTests.java │ └── utils │ └── ReportingUtils.java ├── junit5-examples ├── README.md ├── pom.xml └── src │ └── test │ └── java │ └── com │ └── github │ └── noconnor │ └── junitperf │ └── examples │ ├── ExampleAsyncTests.java │ ├── ExampleCommonReporter.java │ ├── ExampleConsoleReporter.java │ ├── ExampleFailureTests.java │ ├── ExampleParameterizedTests.java │ ├── ExampleSuccessTests.java │ ├── ExampleTestSuiteUsage.java │ ├── existing │ ├── TestClassOne.java │ ├── TestClassThree.java │ └── TestClassTwo.java │ └── utils │ └── ReportingUtils.java ├── junitperf-core ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── github │ │ │ └── noconnor │ │ │ └── junitperf │ │ │ ├── JUnitPerfTest.java │ │ │ ├── JUnitPerfTestRequirement.java │ │ │ ├── data │ │ │ ├── EvaluationContext.java │ │ │ ├── NoOpTestContext.java │ │ │ └── TestContext.java │ │ │ ├── datetime │ │ │ └── DatetimeUtils.java │ │ │ ├── reporting │ │ │ ├── ReportGenerator.java │ │ │ ├── providers │ │ │ │ ├── ConsoleReportGenerator.java │ │ │ │ ├── CsvReportGenerator.java │ │ │ │ ├── HtmlReportGenerator.java │ │ │ │ └── utils │ │ │ │ │ ├── ViewData.java │ │ │ │ │ └── ViewProcessor.java │ │ │ └── utils │ │ │ │ └── FormatterUtils.java │ │ │ ├── statements │ │ │ ├── EvaluationTask.java │ │ │ ├── ExceptionsRegistry.java │ │ │ ├── PerformanceEvaluationStatement.java │ │ │ ├── SimpleTestStatement.java │ │ │ └── TestStatement.java │ │ │ └── statistics │ │ │ ├── StatisticsCalculator.java │ │ │ └── providers │ │ │ ├── DescriptiveStatisticsCalculator.java │ │ │ └── NoOpStatisticsCollector.java │ └── resources │ │ └── templates │ │ └── report.template │ └── test │ ├── java │ └── com │ │ └── github │ │ └── noconnor │ │ └── junitperf │ │ ├── BaseTest.java │ │ ├── data │ │ ├── EvaluationContextTest.java │ │ ├── NoOpTestContextTest.java │ │ └── TestContextTest.java │ │ ├── datetime │ │ └── DatetimeUtilsTest.java │ │ ├── reporting │ │ ├── BaseReportGeneratorTest.java │ │ ├── providers │ │ │ ├── ConsoleReportGeneratorTest.java │ │ │ ├── CsvReportGeneratorTest.java │ │ │ ├── HtmlReportGeneratorTest.java │ │ │ └── utils │ │ │ │ ├── ViewDataTest.java │ │ │ │ └── ViewProcessorTest.java │ │ └── utils │ │ │ └── FormatterUtilsTest.java │ │ ├── statements │ │ ├── EvaluationTaskTest.java │ │ ├── ExceptionsRegistryTest.java │ │ └── PerformanceEvaluationStatementTest.java │ │ └── statistics │ │ └── providers │ │ └── DescriptiveStatisticsCalculatorTest.java │ └── resources │ ├── csv │ ├── fail_abort_succeed.csv │ ├── failed.csv │ ├── mix.csv │ ├── passed.csv │ └── some_failures.csv │ └── html │ ├── example_aborted_failed_success.html │ ├── example_all_failed_report.html │ ├── example_all_passed_report.html │ ├── example_mixed_report.html │ └── example_some_failures_report.html ├── junitperf-junit4 ├── pom.xml └── src │ ├── main │ └── java │ │ └── com │ │ └── github │ │ └── noconnor │ │ └── junitperf │ │ ├── JUnitPerfAsyncRule.java │ │ ├── JUnitPerfRule.java │ │ └── statements │ 
│ ├── DefaultStatement.java │ │ ├── EmptyStatement.java │ │ └── MeasurableStatement.java │ └── test │ └── java │ └── com │ └── github │ └── noconnor │ └── junitperf │ ├── JUnitPerfAsyncRuleTest.java │ ├── JUnitPerfRuleTest.java │ └── statements │ ├── DefaultStatementTest.java │ └── MeasurableStatementTest.java ├── junitperf-junit5 ├── pom.xml └── src │ ├── main │ ├── java │ │ └── com │ │ │ └── github │ │ │ └── noconnor │ │ │ └── junitperf │ │ │ ├── JUnitPerfInterceptor.java │ │ │ ├── JUnitPerfReportingConfig.java │ │ │ ├── JUnitPerfTestActiveConfig.java │ │ │ ├── TestContextSupplier.java │ │ │ ├── statements │ │ │ └── FullStatement.java │ │ │ ├── suite │ │ │ └── SuiteRegistry.java │ │ │ └── utils │ │ │ └── TestReflectionUtils.java │ └── resources │ │ └── META-INF │ │ └── services │ │ └── org.junit.jupiter.api.extension.Extension │ └── test │ └── java │ └── com │ └── github │ └── noconnor │ └── junitperf │ ├── JUnitPerfInterceptorTest.java │ ├── JUnitPerfReportingConfigTest.java │ ├── TestContextSupplierTest.java │ ├── statements │ └── FullStatementTest.java │ ├── suite │ └── SuiteRegistryTest.java │ └── utils │ └── TestReflectionUtilsTest.java └── pom.xml /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Java CI 2 | on: [push, pull_request] 3 | jobs: 4 | build: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - name: Checkout 8 | uses: actions/checkout@v4 9 | - name: Set up JDK 8 10 | uses: actions/setup-java@v4 11 | with: 12 | java-version: '8' 13 | distribution: 'adopt' 14 | - name: Build with Maven 15 | run: mvn clean install -Dgpg.skip 16 | - name: Upload coverage to Codecov 17 | uses: codecov/codecov-action@v4 18 | env: 19 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish package to the Maven Central Repository 2 | on: 3 | release: 4 | types: [created] 5 | jobs: 6 | publish: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | with: 11 | fetch-depth: 0 12 | - name: Set up Maven Central Repository 13 | uses: actions/setup-java@v4 14 | with: 15 | java-version: '11' 16 | distribution: 'adopt' 17 | server-id: ossrh 18 | server-username: MAVEN_CENTRAL_USERNAME 19 | server-password: MAVEN_CENTRAL_PASSWORD 20 | gpg-private-key: ${{ secrets.GPG_SIGNING_KEY }} 21 | gpg-passphrase: MAVEN_GPG_PASSPHRASE 22 | - name: Setup git config 23 | run: | 24 | LATEST_TAG=$(git describe --tags `git rev-list --tags --max-count=1`) 25 | git checkout -b version-update-${LATEST_TAG} 26 | git config user.name "github-actions[release]" 27 | git config user.email "github-actions[release]@users.noreply.github.com" 28 | - name: Publish package 29 | run: mvn -B -DprojectVersionPolicyId=SemVerVersionPolicy release:prepare release:perform 30 | env: 31 | MAVEN_CENTRAL_USERNAME: ${{ secrets.OSSRH_USERNAME }} 32 | MAVEN_CENTRAL_PASSWORD: ${{ secrets.OSSRH_TOKEN }} 33 | MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Include code style settings 3 | !.idea/ 4 | .idea/* 5 | !.idea/codeStyleSettings.xml 6 | *.iml 7 | 8 | # Gradle: 9 | # .idea/gradle.xml 10 | # .idea/libraries 11 | **/build/** 12 | **/classes/** 13 | 14 | **/out/ 15 | **/target/** 16 | 
.gradle 17 | 18 | **/*.versionsBackup 19 | **/*.releaseBackup 20 | release.properties 21 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | # This configuration file was automatically generated by Gitpod. 2 | # Please adjust to your needs (see https://www.gitpod.io/docs/introduction/learn-gitpod/gitpod-yaml) 3 | # and commit this file to your remote git repository to share the goodness with others. 4 | 5 | # Learn more from ready-to-use templates: https://www.gitpod.io/docs/introduction/getting-started/quickstart 6 | 7 | tasks: 8 | - init: mvn install -DskipTests=false -Dgpg.skip 9 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "java.compile.nullAnalysis.mode": "disabled", 3 | "editor.codeActionsOnSave": { 4 | "source.organizeImports": true 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at noconnorie@protonmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Fixes # 2 | 3 | ## Proposed Changes 4 | 5 | - 6 | - 7 | - 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # JUnitPerf ![Build Status](https://github.com/noconnor/JUnitPerf/actions/workflows/ci.yml/badge.svg) [![codecov](https://codecov.io/gh/noconnor/JUnitPerf/branch/master/graph/badge.svg)](https://codecov.io/gh/noconnor/JUnitPerf) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.github.noconnor/junitperf/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.github.noconnor/junitperf) 2 | 3 | 4 | API performance testing framework built using JUnit 5 | 6 | JUnitPerf provides extensions to the JUnit4 & JUnit5 frameworks, allowing unittests to be extended to operate as 7 | performance evaluation tests. 8 | 9 | This library is best suited for testing remote API endpoints or component/integration testing. 10 | If attempting to benchmark code blocks with nanosecond latency then you should consider using [JMH](http://openjdk.java.net/projects/code-tools/jmh/) 11 | 12 | This library interface was heavily influenced by the interface in the deprecated 13 | [Contiperf library](https://github.com/lucaspouzac/contiperf) developed by [Lucas Pouzac](https://github.com/lucaspouzac) 14 | 15 |
16 | 17 | ## Contents 18 | 19 | [Usage Instructions](#usage-instructions) 20 | 21 | [Reports](#reports) 22 | 23 | [Statistics](#statistics) 24 | 25 | [Build Instructions](#build-instructions) 26 | 27 |
28 | 29 | ## Usage Instructions 30 | 31 | The JUnitPerf library supports both JUnit4 and JUnit5 bindings. 32 | Usage documentation for each binding can be found here: 33 | 34 | * [JUnit4 usage documentation](docs/junit4.md) 35 | * [JUnit5 usage documentation](docs/junit5.md) 36 | 37 | 38 |
39 | 40 | ## Test Configuration Options 41 | 42 | `@JUnitPerfTest` has the following configuration parameters: 43 | 44 | | Property | Definition | Default value | 45 | |:---------------------------|:----------------------------------------------------------------|:--------------:| 46 | | threads | The total number of threads to use during test execution | 1 | 47 | | durationMs | Total time to run the test in milliseconds (ms) (includes warm-up period) | 60,000 | 48 | | warmUpMs | Warm-up period in ms; test logic will be executed during warm up, but results will not be considered during statistics evaluation | 0 | 49 | | maxExecutionsPerSecond | Sets the maximum number of iterations per second (disabled by default) | -1 | 50 | | rampUpPeriodMs | Framework ramps up its executions per second smoothly over the duration of this period (disabled by default) | 0 | 51 | | totalExecutions | A best-effort target for the total number of times the test method should be executed; this setting takes precedence over durationMs (disabled by default) | -1 | 52 | 53 | These configuration parameters can be overridden at runtime by specifying a VM arg of the form: `-Djunitperf.<propertyName>=X` 54 | 55 | e.g. to set a test duration of 10 minutes at runtime, specify `-Djunitperf.durationMs=600000`. 56 | This will override the `durationMs` set in the `@JUnitPerfTest` annotation. 57 | 58 | **NOTE:** Do not use "_" when defining runtime integer or long override values, i.e. use `600000` and not `600_000` 59 | 60 |
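For illustration, a minimal sketch of a JUnit5-binding test combining several of these options (the class, test name and threshold values are hypothetical; the test body is a placeholder):

```java
import com.github.noconnor.junitperf.JUnitPerfInterceptor;
import com.github.noconnor.junitperf.JUnitPerfTest;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@ExtendWith(JUnitPerfInterceptor.class)
public class ConfiguredPerfTest {

    @Test
    @JUnitPerfTest(threads = 10,                   // 10 concurrent worker threads
                   durationMs = 60_000,            // run for 60s (includes warm up)
                   warmUpMs = 5_000,               // discard the first 5s of measurements
                   rampUpPeriodMs = 2_000,         // ramp smoothly up to the target rate
                   maxExecutionsPerSecond = 1_000) // cap the execution rate
    public void someEndpointTest() {
        // code under test goes here
    }
}
```

At runtime, a VM arg such as `-Djunitperf.threads=20` would override the annotated thread count.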
61 | 62 | `@JUnitPerfTestRequirement` has the following configuration parameters: 63 | 64 | | Property | Definition | Default value | 65 | |:-----------------------|:----------------------------------------------------------------|:---------------:| 66 | | percentiles | Comma-separated list of ms percentile targets, format: percentile1:limit,percentile2:limit (e.g. 90:3.3,99:6.8) | "" | 67 | | executionsPerSec | Target executions per second | 1 | 68 | | allowedErrorPercentage | Allowed % of errors (uncaught exceptions) during test execution (value between 0 and 1, where 1 = 100% errors allowed) | 0 | 69 | | minLatency | Expected minimum latency in ms; if the measured minimum latency is above this value, the test will fail | disabled | 70 | | maxLatency | Expected maximum latency in ms; if the measured maximum latency is above this value, the test will fail | disabled | 71 | | meanLatency | Expected mean latency in ms; if the measured mean latency is above this value, the test will fail | disabled | 72 | 73 |
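A short sketch combining both annotations, dropping into a test class like the one above (the thresholds here are illustrative, not recommendations):

```java
@Test
@JUnitPerfTest(threads = 5, durationMs = 10_000, warmUpMs = 1_000)
@JUnitPerfTestRequirement(
        percentiles = "90:3.3,99:6.8",  // 90th pct <= 3.3ms, 99th pct <= 6.8ms
        executionsPerSec = 1_000,       // require at least 1000 executions/sec
        allowedErrorPercentage = 0.01F, // tolerate up to 1% errors
        meanLatency = 5.0F)             // require mean latency <= 5ms
public void someRequirementsTest() {
    // code under test goes here
}
```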
74 | 75 | ## Reports 76 | 77 | [HTML Reports](#html-reports) 78 | 79 | [Console Reporting](#console-reporting) 80 | 81 | [CSV Reporting](#csv-reporting) 82 | 83 |
84 | 85 | #### HTML Reports 86 | 87 | An example HTML report can be seen below: 88 | 89 | ![HTML Report](https://raw.githubusercontent.com/noconnor/JUnitPerf/master/docs/common/images/example_report.png "Example JUnitPerf html report") 90 | 91 | Hovering over the datapoints on the percentile latency graph will provide latency/percentile information. 92 | 93 | The HTML reporter will generate an HTML performance report under `${BUILD_DIR}/reports/junitperf_report.html` 94 | 95 | It is possible to override the template by placing a customised `src/main/resources/templates/report.template` file on the classpath ahead of the default template. 96 | 97 |
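The report location can be customised by constructing the generator with an explicit path, mirroring the `ReportingUtils` helper used in the example modules (the file name below is illustrative):

```java
import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator;

// Write the HTML report to a custom location instead of the default path
HtmlReportGenerator reporter = new HtmlReportGenerator(
        System.getProperty("user.dir") + "/build/reports/my_report.html");
```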
98 | 99 | #### Console Reporting 100 | 101 | It is also possible to use one of the other built-in reporters, such as the console reporter. 102 | 103 | Example output: 104 | 105 | ``` 106 | 15:55:06.575 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Started at: 2017-10-28 15:55:05 107 | 15:55:06.580 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Invocations: 765 108 | 15:55:06.580 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - - Success: 765 109 | 15:55:06.580 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - - Errors: 0 110 | 15:55:06.580 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - - Errors: 0.0% - PASSED 111 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - 112 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Thread Count: 1 113 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Warm up: 0ms 114 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - 115 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Execution time: 1000ms 116 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Throughput: 766/s (Required: 10000/s) - FAILED!! 117 | 15:55:06.581 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Min. latency: 1.012392ms 118 | 15:55:06.582 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Max latency: 3.74209ms 119 | 15:55:06.582 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - Ave latency: 1.2975845ms 120 | 15:55:06.583 [main] INFO c.g.n.j.r.p.ConsoleReportGenerator - 121 | ``` 122 |
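Attaching the console reporter follows the same pattern as the other reporters; the JUnit4 form used in the example modules is:

```java
// Route the performance report to the console/log instead of a file
@Rule
public JUnitPerfRule perfRule = new JUnitPerfRule(new ConsoleReportGenerator());
```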
124 | 125 | #### CSV Reporting 126 | 127 | It is also possible to use the built-in CSV reporter. 128 | The CSV reporter will generate a CSV file at the default location `${BUILD_DIR}/reports/junitperf_report.csv`. 129 | 130 | The CSV output will have the following format: 131 | 132 | ``` 133 | testName,duration,threadCount,throughput,minLatencyMs,maxLatencyMs,meanLatencyMs,percentileData 134 | unittest1,10000,50,101,500000.0,1.430,6.430,1:0.0;2:0.0;3:0.0;4:0.0;5:0.0; ... ;98:4.03434;99:4.83434680 135 | ``` 136 | 137 | **NOTE:** the percentileData is formatted as `percentile1:latency;percentile2:latency; ...` 138 | 139 | 140 |
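The CSV reporter is wired up the same way as the other reporters; an alternative output path may be supplied via its constructor (the path below is illustrative):

```java
// Default output location is ${BUILD_DIR}/reports/junitperf_report.csv
@Rule
public JUnitPerfRule perfRule = new JUnitPerfRule(
        new CsvReportGenerator("/tmp/reports/my_report.csv"));
```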
141 | 142 | 143 | ## Statistics 144 | 145 | By default, statistics are captured and calculated using the Apache [Descriptive Statistics library](http://commons.apache.org/proper/commons-math/userguide/stat.html#a1.2_Descriptive_statistics). 146 | See [DescriptiveStatisticsCalculator](junitperf-core/src/main/java/com/github/noconnor/junitperf/statistics/providers/DescriptiveStatisticsCalculator.java) for more details. 147 | 148 | The default statistics calculator has an "infinite" sampling window. 149 | As a result, long-running tests may require a lot of memory to hold all test samples. 150 | The window size may be set to a fixed size as follows: `new DescriptiveStatisticsCalculator(1_000_000)` 151 | 152 | 153 |
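A minimal sketch of capping the sampling window is shown below. Note that how a custom calculator is attached to a test differs per binding, so the wiring comment is an assumption rather than a confirmed API; consult [docs/junit4.md](docs/junit4.md) / [docs/junit5.md](docs/junit5.md) for the supported hooks.

```java
import com.github.noconnor.junitperf.statistics.StatisticsCalculator;
import com.github.noconnor.junitperf.statistics.providers.DescriptiveStatisticsCalculator;

// Keep at most 1M samples in memory, bounding heap usage on long-running tests
StatisticsCalculator calculator = new DescriptiveStatisticsCalculator(1_000_000);
// Wiring is binding-specific: e.g. passing the calculator to the JUnit4 rule
// or to the JUnit5 reporting config (see the usage docs for your binding).
```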
154 | 155 | ## Build Instructions 156 | 157 | To compile this project and run tests, execute the following command from the root project directory: `mvn clean test -Dgpg.skip` 158 | 159 | To generate a library jar, execute: `mvn clean package -Dgpg.skip` 160 | 161 | **IntelliJ 14 Setup** 162 | 163 | To run/add to this project using IntelliJ, you will require the following plugins: 164 | 165 | * [Lombok](https://plugins.jetbrains.com/plugin/6317) 166 | * CodeStyle Formatter 167 |
168 | To configure your IntelliJ settings to use this formatter: 169 | * IntelliJ IDEA > Preferences > Editor > Code Style > Scheme > Project (Apply Settings) 170 | 171 | To resolve issues with Lombok annotations not being compiled during a module build (Make), try setting the following preference: 172 | 173 | * Go to the preferences (settings) menu 174 | * Search for the "Compiler" section in the dialog window and then go to the "Annotation Processors" subsection 175 | * Tick the checkbox reading "Enable annotation processing" 176 | 177 |
178 | 179 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /docs/common/images/example_report.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/noconnor/JUnitPerf/2bc3ba143183d717ba3ce2c66123219943bca73d/docs/common/images/example_report.png -------------------------------------------------------------------------------- /junit4-examples/README.md: -------------------------------------------------------------------------------- 1 | ### Running instructions: 2 | 3 | To run examples using maven: 4 | 5 | * From project root directory run: `mvn clean install -Dgpg.skip` 6 | * From `junit4-examples` directory run: `mvn clean test -DskipTests=false` 7 | 8 | **NOTE:** The example tests contain some example failure scenarios, so you should expect `mvn test` command to fail when running these examples. 9 | -------------------------------------------------------------------------------- /junit4-examples/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | junitperf-parent 6 | com.github.noconnor 7 | 1.37.0-SNAPSHOT 8 | 9 | 4.0.0 10 | junit4-examples 11 | 12 | 13 | true 14 | 15 | 16 | 17 | 18 | com.github.noconnor 19 | junitperf 20 | 1.37.0-SNAPSHOT 21 | test 22 | 23 | 24 | 25 | 26 | 27 | 28 | org.apache.maven.plugins 29 | maven-deploy-plugin 30 | 31 | ${skipTests} 32 | 33 | 34 | 35 | org.apache.maven.plugins 36 | maven-surefire-plugin 37 | 38 | ${skipTests} 39 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /junit4-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleAsyncTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.JUnitPerfAsyncRule; 4 | import com.github.noconnor.junitperf.JUnitPerfTest; 5 | import com.github.noconnor.junitperf.data.TestContext; 6 | import org.junit.AfterClass; 7 | import org.junit.BeforeClass; 8 | import org.junit.Rule; 9 | import org.junit.Test; 10 | 11 | import java.util.concurrent.ExecutorService; 12 | import java.util.concurrent.Executors; 13 | import java.util.concurrent.ThreadLocalRandom; 14 | 15 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 16 | 17 | public class ExampleAsyncTests { 18 | 19 | @Rule 20 | public JUnitPerfAsyncRule rule = new JUnitPerfAsyncRule(newHtmlReporter("async_test.html")); 21 | 22 | private static ExecutorService pool; 23 | 24 | @BeforeClass 25 | public static void setup() { 26 | pool = Executors.newFixedThreadPool(100); 27 | } 28 | 29 | @AfterClass 30 | public static void teardown() { 31 | pool.shutdownNow(); 32 | } 33 | 34 | @Test 35 | @JUnitPerfTest(durationMs = 10_000, warmUpMs = 1_000, maxExecutionsPerSecond = 100) 36 | public void whenTestExecutesAsynchronously_thenMeasurementsCanStillBeCaptured() { 37 | TestContext context = rule.newContext(); 38 | pool.submit(() -> { 39 | someProcessingDelay(); 40 | if (isSuccessful()) { 41 | context.success(); 42 | } else { 43 | context.fail(); 44 | } 45 | }); 46 | } 47 | 48 | private boolean isSuccessful() { 49 | return ThreadLocalRandom.current().nextInt(0, 100) > 50; 50 | } 51 | 52 
| private void someProcessingDelay() { 53 | try { 54 | Thread.sleep(100); 55 | } catch (InterruptedException e) { 56 | // IGNORE 57 | } 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /junit4-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleCommonReporter.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import java.io.IOException; 4 | import java.net.InetSocketAddress; 5 | import java.net.Socket; 6 | import org.junit.Rule; 7 | import org.junit.Test; 8 | import org.junit.runner.RunWith; 9 | import org.junit.runners.Suite; 10 | import com.github.noconnor.junitperf.JUnitPerfRule; 11 | import com.github.noconnor.junitperf.JUnitPerfTest; 12 | import com.github.noconnor.junitperf.JUnitPerfTestRequirement; 13 | import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator; 14 | 15 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 16 | import static org.junit.Assert.assertTrue; 17 | 18 | @RunWith(Suite.class) 19 | @Suite.SuiteClasses({ 20 | ExampleCommonReporter.TestClassOne.class, 21 | ExampleCommonReporter.TestClassTwo.class 22 | }) 23 | public class ExampleCommonReporter { 24 | 25 | // Both test classes should report to the same HTML file 26 | private static final HtmlReportGenerator REPORTER = newHtmlReporter("common_reporter.html"); 27 | 28 | public static class TestClassOne { 29 | @Rule 30 | public JUnitPerfRule perfRule = new JUnitPerfRule(REPORTER); 31 | 32 | @Test 33 | @JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, maxExecutionsPerSecond = 100) 34 | public void whenNoRequirementsArePresent_thenTestShouldAlwaysPass() throws IOException { 35 | try (Socket socket = new Socket()) { 36 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 37 | assertTrue(socket.isConnected()); 38 | } 39 | } 40 | } 41 | 42 | public static class TestClassTwo { 43 | @Rule 44 | public JUnitPerfRule perfRule = new JUnitPerfRule(REPORTER); 45 | 46 | @Test 47 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 48 | @JUnitPerfTestRequirement(executionsPerSec = 10_000) 49 | public void whenThroughputRequirementIsNotMet_thenTestShouldFail() throws InterruptedException { 50 | // Mock some processing logic 51 | Thread.sleep(1); 52 | } 53 | } 54 | 55 | 56 | } 57 | -------------------------------------------------------------------------------- /junit4-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleConsoleReporter.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import org.junit.Rule; 4 | import org.junit.Test; 5 | import com.github.noconnor.junitperf.JUnitPerfRule; 6 | import com.github.noconnor.junitperf.JUnitPerfTest; 7 | import com.github.noconnor.junitperf.reporting.providers.ConsoleReportGenerator; 8 | 9 | public class ExampleConsoleReporter { 10 | 11 | @Rule 12 | public JUnitPerfRule jUnitPerfRule = new JUnitPerfRule(new ConsoleReportGenerator()); 13 | 14 | @Test 15 | @JUnitPerfTest(threads = 1, warmUpMs = 1_000, durationMs = 2_000) 16 | public void test1() throws InterruptedException { 17 | Thread.sleep(10); 18 | } 19 | 20 | @Test 21 | @JUnitPerfTest(threads = 1, warmUpMs = 1_000, durationMs = 2_000) 22 | public void test2() throws InterruptedException { 23 | Thread.sleep(10); 24 | 
} 25 | 26 | @Test 27 | @JUnitPerfTest(threads = 1, warmUpMs = 1_000, durationMs = 2_000) 28 | public void test3() throws InterruptedException { 29 | Thread.sleep(10); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /junit4-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleFailureTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import org.junit.Rule; 4 | import org.junit.Test; 5 | import com.github.noconnor.junitperf.JUnitPerfRule; 6 | import com.github.noconnor.junitperf.JUnitPerfTest; 7 | import com.github.noconnor.junitperf.JUnitPerfTestRequirement; 8 | 9 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 10 | 11 | public class ExampleFailureTests { 12 | 13 | @Rule 14 | public JUnitPerfRule perfRule = new JUnitPerfRule(newHtmlReporter("failures.html")); 15 | 16 | @Test 17 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 18 | @JUnitPerfTestRequirement(executionsPerSec = 10_000) 19 | public void whenThroughputRequirementIsNotMet_thenTestShouldFail() throws InterruptedException { 20 | // Mock some processing logic 21 | Thread.sleep(1); 22 | } 23 | 24 | @Test 25 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 26 | @JUnitPerfTestRequirement(executionsPerSec = 10, percentiles = "99:1") 27 | public void whenLatencyRequirementIsNotMet_thenTestShouldFail() throws InterruptedException { 28 | // Mock some processing logic 29 | Thread.sleep(2); 30 | } 31 | 32 | @Test 33 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 34 | public void whenNoRequirementsAreSpecified_andExceptionIsThrown_thenTestShouldFail() throws InterruptedException { 35 | // Mock some processing logic 36 | Thread.sleep(2); 37 | throw new IllegalStateException("testing failure"); 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /junit4-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleSuccessTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import java.io.IOException; 4 | import java.net.InetSocketAddress; 5 | import java.net.Socket; 6 | import org.junit.After; 7 | import org.junit.Before; 8 | import org.junit.Rule; 9 | import org.junit.Test; 10 | import com.github.noconnor.junitperf.JUnitPerfRule; 11 | import com.github.noconnor.junitperf.JUnitPerfTest; 12 | 13 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 14 | import static org.junit.Assume.assumeFalse; 15 | 16 | public class ExampleSuccessTests { 17 | 18 | @Rule 19 | public JUnitPerfRule perfRule = new JUnitPerfRule( true, newHtmlReporter("success.html")); 20 | 21 | @Before 22 | public void setup() throws InterruptedException { 23 | Thread.sleep(1_000); 24 | } 25 | 26 | @After 27 | public void teardown() throws InterruptedException { 28 | Thread.sleep(1_000); 29 | } 30 | 31 | @Test 32 | @JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, maxExecutionsPerSecond = 100) 33 | public void whenNoRequirementsArePresent_thenTestShouldAlwaysPass() throws IOException { 34 | try (Socket socket = new Socket()) { 35 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 36 | } 37 | } 38 | 39 | @Test 40 | 
@JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, totalExecutions = 100) 41 | public void whenAssumptionFails_thenTestShouldBeSkipped() throws IOException { 42 | //noinspection DataFlowIssue 43 | assumeFalse(true); // dummy test to illustrate skipped tests 44 | try (Socket socket = new Socket()) { 45 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 46 | } 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /junit4-examples/src/test/java/com/github/noconnor/junitperf/examples/utils/ReportingUtils.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples.utils; 2 | 3 | import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator; 4 | 5 | import static java.lang.System.getProperty; 6 | 7 | public class ReportingUtils { 8 | 9 | public static HtmlReportGenerator newHtmlReporter(String fileName){ 10 | return new HtmlReportGenerator(getProperty("user.dir") + "/build/reports/" + fileName); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /junit5-examples/README.md: -------------------------------------------------------------------------------- 1 | ### Running instructions: 2 | 3 | To run examples using maven: 4 | 5 | * From project root directory run: `mvn clean install -Dgpg.skip` 6 | * From `junit5-examples` directory run: `mvn clean test -DskipTests=false` 7 | 8 | **NOTE:** The example tests contain some example failure scenarios, so you should expect `mvn test` command to fail when running these examples. 9 | -------------------------------------------------------------------------------- /junit5-examples/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | junitperf-parent 6 | com.github.noconnor 7 | 1.37.0-SNAPSHOT 8 | 9 | 4.0.0 10 | junit5-examples 11 | 12 | 13 | true 14 | 15 | 16 | 17 | 18 | com.github.noconnor 19 | junitperf-junit5 20 | 1.37.0-SNAPSHOT 21 | test 22 | 23 | 24 | 25 | org.junit.platform 26 | junit-platform-suite-engine 27 | 1.9.3 28 | test 29 | 30 | 31 | 32 | 33 | 34 | 35 | org.apache.maven.plugins 36 | maven-deploy-plugin 37 | 38 | ${skipTests} 39 | 40 | 41 | 42 | org.apache.maven.plugins 43 | maven-surefire-plugin 44 | 45 | ${skipTests} 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleAsyncTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.*; 4 | import com.github.noconnor.junitperf.data.TestContext; 5 | import org.junit.jupiter.api.AfterAll; 6 | import org.junit.jupiter.api.BeforeAll; 7 | import org.junit.jupiter.api.Test; 8 | import org.junit.jupiter.api.extension.ExtendWith; 9 | 10 | import java.util.concurrent.ExecutorService; 11 | import java.util.concurrent.Executors; 12 | import java.util.concurrent.ThreadLocalRandom; 13 | 14 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 15 | 16 | @ExtendWith(JUnitPerfInterceptor.class) 17 | public class ExampleAsyncTests { 18 | 19 | // Should be static or new instance will be created for each @Test method 20 | @JUnitPerfTestActiveConfig 21 | private final 
static JUnitPerfReportingConfig PERF_CONFIG = JUnitPerfReportingConfig.builder() 22 | .reportGenerator(newHtmlReporter("async_test.html")) 23 | .build(); 24 | private static ExecutorService pool; 25 | 26 | @BeforeAll 27 | public static void setup() { 28 | pool = Executors.newFixedThreadPool(100); 29 | } 30 | 31 | @AfterAll 32 | public static void teardown() { 33 | pool.shutdownNow(); 34 | } 35 | 36 | @Test 37 | @JUnitPerfTest(durationMs = 10_000, warmUpMs = 1_000, maxExecutionsPerSecond = 100) 38 | public void whenTestExecutesAsynchronously_thenMeasurementsCanStillBeCaptured(TestContextSupplier supplier) { 39 | // Starts the task timer 40 | TestContext context = supplier.startMeasurement(); 41 | pool.submit(() -> { 42 | someProcessingDelay(); 43 | if (isSuccessful()) { 44 | // marks task as successful and stops the task time measurement 45 | context.success(); 46 | } else { 47 | // marks task as failure and stops the task time measurement 48 | context.fail(); 49 | } 50 | }); 51 | } 52 | 53 | private boolean isSuccessful() { 54 | return ThreadLocalRandom.current().nextInt(0, 100) > 50; 55 | } 56 | 57 | private void someProcessingDelay() { 58 | try { 59 | Thread.sleep(100); 60 | } catch (InterruptedException e) { 61 | // IGNORE 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleCommonReporter.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.*; 4 | import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator; 5 | import org.junit.jupiter.api.Test; 6 | import org.junit.jupiter.api.extension.ExtendWith; 7 | import org.junit.platform.suite.api.SelectClasses; 8 | import org.junit.platform.suite.api.Suite; 9 | 10 | import java.io.IOException; 11 | import java.net.InetSocketAddress; 12 | import java.net.Socket; 13 | 14 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 15 | import static org.junit.Assert.assertTrue; 16 | 17 | // Suite tests require the junit-platform-suite-engine dependency (see pom file) 18 | @Suite 19 | @SelectClasses({ 20 | ExampleCommonReporter.TestClassOne.class, 21 | ExampleCommonReporter.TestClassTwo.class 22 | }) 23 | public class ExampleCommonReporter { 24 | 25 | // Both test classes should report to the same HTML file 26 | private static final HtmlReportGenerator REPORTER = newHtmlReporter("common_reporter.html"); 27 | 28 | @ExtendWith(JUnitPerfInterceptor.class) 29 | public static class TestClassOne { 30 | 31 | // Should be static or new instance will be created for each @Test method 32 | @JUnitPerfTestActiveConfig 33 | private static final JUnitPerfReportingConfig PERF_CONFIG = JUnitPerfReportingConfig.builder() 34 | .reportGenerator(REPORTER) 35 | .build(); 36 | 37 | @Test 38 | @JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, maxExecutionsPerSecond = 100) 39 | public void whenNoRequirementsArePresent_thenTestShouldAlwaysPass() throws IOException { 40 | try (Socket socket = new Socket()) { 41 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 42 | assertTrue(socket.isConnected()); 43 | } 44 | } 45 | } 46 | 47 | @ExtendWith(JUnitPerfInterceptor.class) 48 | public static class TestClassTwo { 49 | 50 | // Should be static or new instance will be created for each @Test method 51 | 
@JUnitPerfTestActiveConfig 52 | private static final JUnitPerfReportingConfig PERF_CONFIG = JUnitPerfReportingConfig.builder() 53 | .reportGenerator(REPORTER) 54 | .build(); 55 | 56 | @Test 57 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 58 | @JUnitPerfTestRequirement(executionsPerSec = 10_000) 59 | public void whenThroughputRequirementIsNotMet_thenTestShouldFail() throws InterruptedException { 60 | // Mock some processing logic 61 | Thread.sleep(1); 62 | } 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleConsoleReporter.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.JUnitPerfInterceptor; 4 | import com.github.noconnor.junitperf.JUnitPerfTest; 5 | import com.github.noconnor.junitperf.JUnitPerfTestActiveConfig; 6 | import com.github.noconnor.junitperf.JUnitPerfReportingConfig; 7 | import com.github.noconnor.junitperf.reporting.providers.ConsoleReportGenerator; 8 | import org.junit.jupiter.api.Test; 9 | import org.junit.jupiter.api.extension.ExtendWith; 10 | 11 | @ExtendWith(JUnitPerfInterceptor.class) 12 | public class ExampleConsoleReporter { 13 | 14 | 15 | // Should be static or new instance will be created for each @Test method 16 | @JUnitPerfTestActiveConfig 17 | private static final JUnitPerfReportingConfig PERF_CONFIG = JUnitPerfReportingConfig.builder() 18 | .reportGenerator(new ConsoleReportGenerator()) 19 | .build(); 20 | 21 | 22 | @Test 23 | @JUnitPerfTest(threads = 1, warmUpMs = 1_000, durationMs = 2_000) 24 | public void test1() throws InterruptedException { 25 | Thread.sleep(10); 26 | } 27 | 28 | @Test 29 | @JUnitPerfTest(threads = 1, warmUpMs = 1_000, durationMs = 2_000) 30 | public void test2() throws InterruptedException { 31 | Thread.sleep(10); 32 | } 33 | 34 | @Test 35 | @JUnitPerfTest(threads = 1, warmUpMs = 1_000, durationMs = 2_000) 36 | public void test3() throws InterruptedException { 37 | Thread.sleep(10); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleFailureTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.*; 4 | import org.junit.jupiter.api.Test; 5 | import org.junit.jupiter.api.extension.ExtendWith; 6 | 7 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 8 | 9 | @ExtendWith(JUnitPerfInterceptor.class) 10 | public class ExampleFailureTests { 11 | 12 | // Should be static or new instance will be created for each @Test method 13 | @JUnitPerfTestActiveConfig 14 | private final static JUnitPerfReportingConfig PERF_CONFIG = JUnitPerfReportingConfig.builder() 15 | .reportGenerator(newHtmlReporter("failures.html")) 16 | .build(); 17 | 18 | @Test 19 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 20 | @JUnitPerfTestRequirement(executionsPerSec = 10_000) 21 | public void whenThroughputRequirementIsNotMet_thenTestShouldFail() throws InterruptedException { 22 | // Mock some processing logic 23 | Thread.sleep(1); 24 | } 25 | 26 | @Test 27 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 28 | 
@JUnitPerfTestRequirement(executionsPerSec = 10, percentiles = "99:1") 29 | public void whenLatencyRequirementIsNotMet_thenTestShouldFail() throws InterruptedException { 30 | // Mock some processing logic 31 | Thread.sleep(2); 32 | } 33 | 34 | @Test 35 | @JUnitPerfTest(threads = 1, durationMs = 1_000, maxExecutionsPerSecond = 1_000) 36 | public void whenNoRequirementsAreSpecified_andExceptionIsThrown_thenTestShouldFail() throws InterruptedException { 37 | // Mock some processing logic 38 | Thread.sleep(2); 39 | throw new IllegalStateException("testing failure"); 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleParameterizedTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import java.io.IOException; 4 | import java.net.InetSocketAddress; 5 | import java.net.Socket; 6 | import java.util.Arrays; 7 | import java.util.List; 8 | 9 | import org.junit.jupiter.api.extension.ExtendWith; 10 | import org.junit.jupiter.params.ParameterizedTest; 11 | import org.junit.jupiter.params.provider.MethodSource; 12 | 13 | import com.github.noconnor.junitperf.JUnitPerfInterceptor; 14 | import com.github.noconnor.junitperf.JUnitPerfTest; 15 | import com.github.noconnor.junitperf.TestContextSupplier; 16 | import com.github.noconnor.junitperf.data.TestContext; 17 | 18 | @ExtendWith(JUnitPerfInterceptor.class) 19 | public class ExampleParameterizedTests { 20 | 21 | static List hostnames() { 22 | return Arrays.asList("www.google.com", "www.example.com"); 23 | } 24 | 25 | @MethodSource("hostnames") 26 | @ParameterizedTest(name = "test1(hostname = {0})") 27 | @JUnitPerfTest(durationMs = 3_000, maxExecutionsPerSecond = 1) 28 | public void test1(String hostname) throws IOException { 29 | try (Socket socket = new Socket()) { 30 | socket.connect(new InetSocketAddress(hostname, 80), 1000); 31 | } 32 | } 33 | 34 | @MethodSource("hostnames") 35 | @ParameterizedTest(name = "test2(hostname = {0})") 36 | @JUnitPerfTest(durationMs = 3_000, maxExecutionsPerSecond = 1) 37 | public void test2(String hostname, TestContextSupplier supplier) { 38 | TestContext context = supplier.startMeasurement(); 39 | try (Socket socket = new Socket()) { 40 | socket.connect(new InetSocketAddress(hostname, 80), 1000); 41 | context.success(); 42 | } catch (IOException e) { 43 | context.fail(); 44 | } 45 | } 46 | 47 | } 48 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleSuccessTests.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.JUnitPerfInterceptor; 4 | import com.github.noconnor.junitperf.JUnitPerfReportingConfig; 5 | import com.github.noconnor.junitperf.JUnitPerfTest; 6 | import com.github.noconnor.junitperf.JUnitPerfTestActiveConfig; 7 | import org.junit.jupiter.api.AfterEach; 8 | import org.junit.jupiter.api.BeforeEach; 9 | import org.junit.jupiter.api.Test; 10 | import org.junit.jupiter.api.extension.ExtendWith; 11 | 12 | import java.io.IOException; 13 | import java.net.InetSocketAddress; 14 | import java.net.Socket; 15 | 16 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 17 | import static 
org.junit.jupiter.api.Assumptions.assumeFalse; 18 | 19 | @ExtendWith(JUnitPerfInterceptor.class) 20 | public class ExampleSuccessTests { 21 | 22 | // Should be static or new instance will be created for each @Test method 23 | @JUnitPerfTestActiveConfig 24 | private final static JUnitPerfReportingConfig PERF_CONFIG = JUnitPerfReportingConfig.builder() 25 | .reportGenerator(newHtmlReporter("success.html")) 26 | .build(); 27 | 28 | @BeforeEach 29 | public void setup() throws InterruptedException { 30 | Thread.sleep(10); 31 | } 32 | 33 | @AfterEach 34 | public void teardown() throws InterruptedException { 35 | Thread.sleep(10); 36 | } 37 | 38 | @Test 39 | @JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, maxExecutionsPerSecond = 100) 40 | public void whenNoRequirementsArePresent_thenTestShouldAlwaysPass() throws IOException { 41 | try (Socket socket = new Socket()) { 42 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 43 | } 44 | } 45 | 46 | @Test 47 | @JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, totalExecutions = 50) 48 | public void whenTotalNumberOfExecutionsIsSet_thenTotalExecutionsShouldOverrideDurationMs() throws IOException { 49 | try (Socket socket = new Socket()) { 50 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 51 | } 52 | } 53 | 54 | @Test 55 | @JUnitPerfTest(threads = 10, durationMs = 10_000, warmUpMs = 1_000, rampUpPeriodMs = 2_000, maxExecutionsPerSecond = 100) 56 | public void whenAssumptionFails_thenTestWillBeSkipped() throws IOException { 57 | //noinspection DataFlowIssue 58 | assumeFalse(true); // dummy test to illustrate skipped tests 59 | 60 | try (Socket socket = new Socket()) { 61 | socket.connect(new InetSocketAddress("www.google.com", 80), 1000); 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/ExampleTestSuiteUsage.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples; 2 | 3 | import com.github.noconnor.junitperf.JUnitPerfReportingConfig; 4 | import com.github.noconnor.junitperf.JUnitPerfTest; 5 | import com.github.noconnor.junitperf.JUnitPerfTestActiveConfig; 6 | import com.github.noconnor.junitperf.JUnitPerfTestRequirement; 7 | import com.github.noconnor.junitperf.examples.existing.TestClassOne; 8 | import com.github.noconnor.junitperf.examples.existing.TestClassTwo; 9 | import org.junit.platform.suite.api.ConfigurationParameter; 10 | import org.junit.platform.suite.api.SelectClasses; 11 | import org.junit.platform.suite.api.Suite; 12 | 13 | import static com.github.noconnor.junitperf.examples.utils.ReportingUtils.newHtmlReporter; 14 | 15 | 16 | // 17 | // To run suite: mvn -Dtest=ExampleTestSuiteUsage -DskipTests=false test 18 | // 19 | 20 | @Suite 21 | //@SelectPackages({ 22 | // "com.github.noconnor.junitperf.examples.existing" 23 | //}) 24 | @SelectClasses({ 25 | TestClassOne.class, 26 | TestClassTwo.class 27 | }) 28 | // ConfigurationParameter: Required to enable Test Suite Interceptor Reference: https://www.baeldung.com/junit-5-extensions#1-automatic-extension-registration 29 | @ConfigurationParameter(key = "junit.jupiter.extensions.autodetection.enabled", value = "true") 30 | @JUnitPerfTest(totalExecutions = 100) 31 | @JUnitPerfTestRequirement(allowedErrorPercentage = 0.01F) 32 | public class ExampleTestSuiteUsage { 33 | 
34 | @JUnitPerfTestActiveConfig 35 | public static JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder() 36 | .reportGenerator(newHtmlReporter("suite_reporter.html")) 37 | .build(); 38 | 39 | } 40 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/existing/TestClassOne.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples.existing; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | import static org.junit.jupiter.api.Assumptions.assumeFalse; 6 | 7 | public class TestClassOne { 8 | @Test 9 | public void sample_test1_class1() throws InterruptedException { 10 | Thread.sleep(5); 11 | } 12 | 13 | @Test 14 | public void sample_test2_class1() throws InterruptedException { 15 | // Mock some processing logic 16 | Thread.sleep(1); 17 | } 18 | 19 | @Test 20 | public void sample_test3_class1() throws InterruptedException { 21 | //noinspection DataFlowIssue 22 | assumeFalse(true); // dummy test to illustrate skipped tests 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/existing/TestClassThree.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples.existing; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | public class TestClassThree { 6 | @Test 7 | public void sample_test1_class3() throws InterruptedException { 8 | Thread.sleep(5); 9 | } 10 | 11 | @Test 12 | public void sample_test2_class3() throws InterruptedException { 13 | // Mock some processing logic 14 | Thread.sleep(1); 15 | } 16 | 17 | } 18 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/existing/TestClassTwo.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples.existing; 2 | 3 | import org.junit.jupiter.api.Test; 4 | 5 | public class TestClassTwo { 6 | 7 | @Test 8 | public void sample_test1_class2() throws InterruptedException { 9 | // Mock some processing logic 10 | Thread.sleep(1); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /junit5-examples/src/test/java/com/github/noconnor/junitperf/examples/utils/ReportingUtils.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.examples.utils; 2 | 3 | import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator; 4 | 5 | import static java.lang.System.getProperty; 6 | 7 | public class ReportingUtils { 8 | 9 | public static HtmlReportGenerator newHtmlReporter(String fileName){ 10 | return new HtmlReportGenerator(getProperty("user.dir") + "/build/reports/" + fileName); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /junitperf-core/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | junitperf-parent 6 | com.github.noconnor 7 | 1.37.0-SNAPSHOT 8 | 9 | 4.0.0 10 | junitperf-core 11 | 12 | 13 | 14 | 15 | org.projectlombok 16 | lombok 17 | ${lombok.version} 18 | compile 19 | 20 | 21 | org.apache.commons 22 | commons-lang3 23 | ${commons.lang3.version} 24 | 25 | 26 
| ch.qos.logback 27 | logback-classic 28 | ${logback.classic.version} 29 | 30 | 31 | com.google.guava 32 | guava 33 | ${guava.version} 34 | 35 | 36 | commons-collections 37 | commons-collections 38 | ${commons.collection.version} 39 | 40 | 41 | org.apache.commons 42 | commons-math3 43 | ${commons.math3.version} 44 | 45 | 46 | 47 | 48 | 49 | 50 | org.jacoco 51 | jacoco-maven-plugin 52 | 53 | 54 | 55 | 56 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/JUnitPerfTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | @Retention(RetentionPolicy.RUNTIME) 9 | @Target({ElementType.METHOD, ElementType.TYPE}) 10 | public @interface JUnitPerfTest { 11 | 12 | // Total number of threads to use during the test evaluations 13 | int threads() default 1; 14 | 15 | // Total test duration (milliseconds) after which no more evaluations will take place 16 | int durationMs() default 60_000; 17 | 18 | // During the warm up period (milliseconds) test execution results will be ignored and will not be considered in test result evaluations 19 | int warmUpMs() default 0; 20 | 21 | // Test will execute no more that specified "rateLimit" executions per second 22 | // Default value is no limit 23 | int maxExecutionsPerSecond() default -1; 24 | 25 | // The duration of the period where the framework ramps up its executions per second, 26 | // before reaching its stable (maxExecutionsPerSecond) rate 27 | // If maxExecutionsPerSecond is not set, this attribute will have no effect 28 | int rampUpPeriodMs() default 0; 29 | 30 | // Test will execute totalExecutions number of iterations & complete 31 | // This is a best effort target, test will execute for at least this number of executions. 32 | // If durationMs & totalExecutions are set, totalExecutions will take precedence over test duration 33 | // Default value is no limit 34 | int totalExecutions() default -1; 35 | } 36 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/JUnitPerfTestRequirement.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | @Retention(RetentionPolicy.RUNTIME) 9 | @Target({ElementType.METHOD, ElementType.TYPE}) 10 | public @interface JUnitPerfTestRequirement { 11 | 12 | // Expected target percentile distribution in the format "percentile1:expected_value_ms,percentile2:expected_value_ms,..." 13 | String percentiles() default ""; 14 | 15 | // Expected test throughput (executions per second) 16 | int executionsPerSec() default 0; 17 | 18 | // Expected % of test failures. 
Failures are measured as test case exceptions, default 0% errors allowed 19 | float allowedErrorPercentage() default 0; 20 | 21 | // Expected minimum latency in ms, if minimum latency is above this value, test will fail 22 | float minLatency() default -1; 23 | 24 | // Expected maximum latency in ms, if maximum latency is above this value, test will fail 25 | float maxLatency() default -1; 26 | 27 | // Expected mean latency in ms, if mean latency is above this value, test will fail 28 | float meanLatency() default -1; 29 | 30 | } 31 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/data/NoOpTestContext.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.data; 2 | 3 | public class NoOpTestContext extends TestContext { 4 | 5 | public static final NoOpTestContext INSTANCE = new NoOpTestContext(); 6 | 7 | public NoOpTestContext() { 8 | super(null); 9 | } 10 | 11 | @Override 12 | public void success() { 13 | // Do nothing 14 | } 15 | 16 | @Override 17 | public void fail() { 18 | // Do nothing 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/data/TestContext.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.data; 2 | 3 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 4 | 5 | import static java.lang.System.nanoTime; 6 | 7 | public class TestContext { 8 | 9 | private final StatisticsCalculator stats; 10 | private final long startTimeNs; 11 | 12 | public TestContext(StatisticsCalculator stats) { 13 | this.stats = stats; 14 | this.startTimeNs = nanoTime(); 15 | } 16 | 17 | public void success() { 18 | stats.incrementEvaluationCount(); 19 | stats.addLatencyMeasurement(nanoTime() - startTimeNs); 20 | } 21 | 22 | public void fail() { 23 | stats.incrementEvaluationCount(); 24 | stats.incrementErrorCount(); 25 | stats.addLatencyMeasurement(nanoTime() - startTimeNs); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/datetime/DatetimeUtils.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.datetime; 2 | 3 | import lombok.Setter; 4 | import lombok.experimental.UtilityClass; 5 | 6 | import java.time.LocalDateTime; 7 | import java.time.format.DateTimeFormatter; 8 | 9 | import static java.util.Objects.nonNull; 10 | 11 | @UtilityClass 12 | public class DatetimeUtils { 13 | 14 | @Setter 15 | private static String override; 16 | 17 | public static String now() { 18 | if (nonNull(override)) { 19 | return override; 20 | } 21 | return LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")); 22 | } 23 | 24 | public static String format(int durationMs) { 25 | long seconds = durationMs / 1000; 26 | long minutes = seconds / 60; 27 | long hours = minutes / 60; 28 | long days = hours / 24; 29 | if (days > 0){ 30 | return days + "d:" + hours % 24 + "h:" + minutes % 60 + "m:" + seconds % 60 + "s"; 31 | } else if (hours > 0){ 32 | return hours % 24 + "h:" + minutes % 60 + "m:" + seconds % 60 + "s"; 33 | } else if (minutes > 0){ 34 | return minutes % 60 + "m:" + seconds % 60 + "s"; 35 | } if (seconds > 0){ 36 | return (seconds % 60) + "s"; 37 | 
38 | } else { 39 | return durationMs + "ms"; 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/ReportGenerator.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting; 2 | 3 | import com.github.noconnor.junitperf.data.EvaluationContext; 4 | 5 | import java.util.LinkedHashSet; 6 | 7 | public interface ReportGenerator { 8 | 9 | void generateReport(LinkedHashSet<EvaluationContext> testContexts); 10 | 11 | String getReportPath(); 12 | 13 | } 14 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/providers/ConsoleReportGenerator.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import java.util.Iterator; 6 | import java.util.LinkedHashSet; 7 | import com.github.noconnor.junitperf.data.EvaluationContext; 8 | import com.github.noconnor.junitperf.reporting.ReportGenerator; 9 | 10 | import static com.github.noconnor.junitperf.reporting.utils.FormatterUtils.format; 11 | 12 | @Slf4j 13 | public class ConsoleReportGenerator implements ReportGenerator { 14 | 15 | private static final String PASSED = "PASSED"; 16 | private static final String FAILED = "FAILED!!"; 17 | 18 | 19 | @Override 20 | public void generateReport(LinkedHashSet<EvaluationContext> testContexts) { 21 | // Only output the last context
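// (LinkedHashSet preserves insertion order, so the loop below leaves "context" pointing at the most recently added entry)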
22 | final Iterator<EvaluationContext> itr = testContexts.iterator(); 23 | EvaluationContext context = itr.next(); 24 | while (itr.hasNext()) { 25 | context = itr.next(); 26 | } 27 | updateReport(context); 28 | } 29 | 30 | public void updateReport(EvaluationContext context) { 31 | if (context.isAborted()) { 32 | log.info("Test {} was SKIPPED", context.getTestName()); 33 | } else { 34 | 35 | String throughputStatus = context.isThroughputAchieved() ? PASSED : FAILED; 36 | String errorRateStatus = context.isErrorThresholdAchieved() ? PASSED : FAILED; 37 | 38 | log.info("Test Name: {}", context.getTestName()); 39 | log.info("Started at: {}", context.getStartTime()); 40 | log.info("Invocations: {}", context.getEvaluationCount()); 41 | log.info(" - Success: {}", context.getEvaluationCount() - context.getErrorCount()); 42 | log.info(" - Errors: {}", context.getErrorCount()); 43 | log.info(" - Error rate: {}% - {}", context.getErrorPercentage(), errorRateStatus); 44 | log.info(""); 45 | log.info("Thread Count: {}", context.getConfiguredThreads()); 46 | log.info("Warm up: {} ms", context.getConfiguredWarmUp()); 47 | log.info("Ramp up: {} ms", context.getConfiguredRampUpPeriodMs()); 48 | log.info(""); 49 | log.info("Execution time: {}", context.getTestDurationFormatted()); 50 | log.info("Throughput: {}/s (Required: {}/s) - {}", 51 | context.getThroughputQps(), 52 | context.getRequiredThroughput(), 53 | throughputStatus); 54 | log.info("Min. latency: {} ms (Required: {}ms) - {}", 55 | context.getMinLatencyMs(), 56 | format(context.getRequiredMinLatency()), context.isMinLatencyAchieved() ? PASSED : FAILED); 57 | log.info("Max. latency: {} ms (Required: {}ms) - {}", 58 | context.getMaxLatencyMs(), 59 | format(context.getRequiredMaxLatency()), context.isMaxLatencyAchieved() ? PASSED : FAILED); 60 | log.info("Ave. latency: {} ms (Required: {}ms) - {}", 61 | context.getMeanLatencyMs(), 62 | format(context.getRequiredMeanLatency()), context.isMeanLatencyAchieved() ? PASSED : FAILED); 63 | context.getRequiredPercentiles().forEach((percentile, threshold) -> { 64 | String percentileStatus = context.getPercentileResults().get(percentile) ? PASSED : FAILED; 65 | log.info("{}: {}ms (Required: {} ms) - {}", 66 | percentile, 67 | context.getLatencyPercentileMs(percentile), 68 | format(threshold), 69 | percentileStatus); 70 | }); 71 | log.info(""); 72 | log.info(""); 73 | } 74 | } 75 | 76 | @Override 77 | public String getReportPath() { 78 | return null; 79 | } 80 | 81 | } 82 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/providers/CsvReportGenerator.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers; 2 | 3 | import lombok.extern.slf4j.Slf4j; 4 | 5 | import java.io.BufferedWriter; 6 | import java.io.FileNotFoundException; 7 | import java.io.FileOutputStream; 8 | import java.io.IOException; 9 | import java.io.OutputStreamWriter; 10 | import java.io.UnsupportedEncodingException; 11 | import java.util.LinkedHashSet; 12 | import java.util.List; 13 | import java.util.Set; 14 | import java.util.stream.IntStream; 15 | import com.github.noconnor.junitperf.data.EvaluationContext; 16 | import com.github.noconnor.junitperf.reporting.ReportGenerator; 17 | import com.google.common.base.Joiner; 18 | 19 | import static java.lang.System.getProperty; 20 | import static java.util.stream.Collectors.toList; 21 | 22 | @Slf4j 23 | public class CsvReportGenerator implements ReportGenerator { 24 | 25 | private static final String DEFAULT_REPORT_PATH = getProperty("user.dir") + "/build/reports/junitperf_report.csv"; 26 | 27 | private final String reportPath; 28 | private final Set<EvaluationContext> history; 29 | 30 | public CsvReportGenerator() { 31 | this(DEFAULT_REPORT_PATH); 32 | } 33 | 34 | @SuppressWarnings("WeakerAccess") 35 | public CsvReportGenerator(String reportPath) { 36 | this.reportPath = reportPath; 37 | this.history = new LinkedHashSet<>(); 38 | } 39 | 40 | @Override 41 | public void generateReport(LinkedHashSet<EvaluationContext> testContexts) { 42 | history.addAll(testContexts); 43 | try (BufferedWriter writer = newBufferedWriter()) { 44 | 45 | writer.write(buildHeader()); 46 | writer.newLine(); 47 | history.forEach(context -> { 48 | 49 | String name = context.isAborted() ? context.getTestName() + " (skipped)" : context.getTestName(); 50 | int duration = context.isAborted() ? 0 : context.getConfiguredDuration();
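// Record columns mirror buildHeader(): name, duration, threads, throughput, min/max/mean latency, then the 1st..100th percentile values. Note that the latency values are written in milliseconds (getMinLatencyMs etc.) even though the header names carry an "Ns" suffix.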
51 | String record = String.format("%s,%s,%d,%d,%.4f,%.4f,%.4f,%s", 52 | name, 53 | duration, 54 | context.getConfiguredThreads(), 55 | context.getThroughputQps(), 56 | context.getMinLatencyMs(), 57 | context.getMaxLatencyMs(), 58 | context.getMeanLatencyMs(), 59 | Joiner.on(",").skipNulls().join(generateFormattedPercentileData(context))); 60 | try { 61 | writer.write(record); 62 | writer.newLine(); 63 | } catch (IOException e) { 64 | log.error("Unable to write record {}", record); 65 | } 66 | }); 67 | } catch (Exception e) { 68 | throw new IllegalStateException(e); 69 | } 70 | } 71 | 72 | @Override 73 | public String getReportPath() { 74 | return reportPath; 75 | } 76 | 77 | private List<String> generateFormattedPercentileData(final EvaluationContext context) { 78 | return IntStream.range(1, 101).mapToObj(i -> { 79 | return String.format("%.4f", context.getLatencyPercentileMs(i)); 80 | }).collect(toList()); 81 | } 82 | 83 | private BufferedWriter newBufferedWriter() throws UnsupportedEncodingException, FileNotFoundException { 84 | return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(reportPath), "utf-8")); 85 | } 86 | 87 | private String buildHeader() { 88 | String header = "testName,duration,threadCount,throughput,minLatencyNs,maxLatencyNs,meanLatencyNs,<PERCENTILES>"; 89 | List<String> percentiles = IntStream.range(1, 101).mapToObj(CsvReportGenerator::ordinal).collect(toList()); 90 | header = header.replace("<PERCENTILES>", Joiner.on(",").join(percentiles)); 91 | return header; 92 | } 93 | 94 | // https://stackoverflow.com/questions/6810336/is-there-a-way-in-java-to-convert-an-integer-to-its-ordinal 95 | private static String ordinal(int i) { 96 | int mod100 = i % 100; 97 | int mod10 = i % 10; 98 | if (mod10 == 1 && mod100 != 11) { 99 | return i + "st"; 100 | } else if (mod10 == 2 && mod100 != 12) { 101 | return i + "nd"; 102 | } else if (mod10 == 3 && mod100 != 13) { 103 | return i + "rd"; 104 | } else { 105 | return i + "th"; 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/providers/HtmlReportGenerator.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers; 2 | 3 | import com.github.noconnor.junitperf.data.EvaluationContext; 4 | import com.github.noconnor.junitperf.reporting.ReportGenerator; 5 | import com.github.noconnor.junitperf.reporting.providers.utils.ViewData; 6 | import com.github.noconnor.junitperf.reporting.providers.utils.ViewProcessor; 7 | import lombok.experimental.UtilityClass; 8 | import lombok.extern.slf4j.Slf4j; 9 | 10 | import java.io.File; 11 | import java.io.InputStream; 12 | import java.nio.charset.StandardCharsets; 13 | import java.nio.file.Files; 14 | import java.nio.file.Path; 15 | import java.nio.file.Paths; 16 | import java.util.ArrayDeque; 17 | import java.util.Deque; 18 | import java.util.HashMap; 19 | import java.util.HashSet; 20 | import java.util.LinkedHashSet; 21 | import java.util.Map; 22 | import java.util.Scanner; 23 | import java.util.Set; 24 | import java.util.stream.Collectors; 25 | 26 | import static java.lang.System.getProperty; 27 | import static java.util.Objects.isNull; 28 | import static java.util.Objects.nonNull; 29 | 30 | @Slf4j 31 | public class HtmlReportGenerator implements ReportGenerator { 32 | 33 | private static final String DEFAULT_REPORT_PATH = String.join( 34 | File.separator, 35 | getProperty("user.dir"), 36 | "build", 37 | "reports", 38 | "junitperf_report.html" 39 | ); 40 | 41 | private static final String REPORT_TEMPLATE = "/templates/report.template";
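// The template marks reusable fragments with {% OVERVIEW_BLOCK %}, {% DETAILED_BLOCK %} and {% PERCENTILES_BLOCK %}, each terminated by {% END %}; HtmlTemplateProcessor.parseTemplateBlocks() (below) extracts them by name.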
getProperty("user.dir") , 36 | "build", 37 | "reports", 38 | "junitperf_report.html" 39 | ); 40 | 41 | private static final String REPORT_TEMPLATE = "/templates/report.template"; 42 | 43 | private static final String OVERVIEW_MARKER = "{% OVERVIEW_BLOCK %}"; 44 | private static final String DETAILS_MARKER = "{% DETAILED_BLOCK %}"; 45 | private static final String PERCENTILE_TARGETS_MARKER = "{% PERCENTILES_BLOCK %}"; 46 | 47 | 48 | private final String reportPath; 49 | private final Set history; 50 | 51 | public HtmlReportGenerator() { 52 | this(DEFAULT_REPORT_PATH); 53 | } 54 | 55 | @SuppressWarnings("WeakerAccess") 56 | public HtmlReportGenerator(String reportPath) { 57 | this.reportPath = reportPath; 58 | this.history = new LinkedHashSet<>(); 59 | } 60 | 61 | @Override 62 | public synchronized void generateReport(LinkedHashSet testContexts) { 63 | history.addAll(testContexts); 64 | renderTemplate(); 65 | } 66 | 67 | @Override 68 | public String getReportPath() { 69 | return reportPath; 70 | } 71 | 72 | private void renderTemplate() { 73 | try { 74 | Path outputPath = Paths.get(reportPath); 75 | 76 | Files.createDirectories(outputPath.getParent()); 77 | log.info("Rendering report to: " + outputPath); 78 | 79 | Map blocks = HtmlTemplateProcessor.parseTemplateBlocks(); 80 | 81 | String root = blocks.get("root"); 82 | 83 | StringBuilder overviews = new StringBuilder(); 84 | StringBuilder details = new StringBuilder(); 85 | 86 | for (EvaluationContext context : history) { 87 | ViewData c = new ViewData(context); 88 | 89 | String overview = ViewProcessor.populateTemplate(c, "context", blocks.get(OVERVIEW_MARKER)); 90 | 91 | if (context.isAborted()) { 92 | overview = overview.replaceAll("href=", "nolink="); 93 | } else { 94 | String detail = ViewProcessor.populateTemplate(c, "context", blocks.get(DETAILS_MARKER)); 95 | String percentileData = ViewProcessor.populateTemplate( 96 | c.getRequiredPercentiles(), 97 | "context.percentiles", 98 | blocks.get(PERCENTILE_TARGETS_MARKER) 99 | ); 100 | 101 | detail = detail.replaceAll(asRegex(PERCENTILE_TARGETS_MARKER), percentileData); 102 | details.append(detail).append("\n"); 103 | } 104 | overviews.append(overview).append("\n"); 105 | } 106 | 107 | root = root.replaceAll(asRegex(OVERVIEW_MARKER), overviews.toString()); 108 | root = root.replaceAll(asRegex(DETAILS_MARKER), details.toString()); 109 | 110 | Files.write(outputPath, root.getBytes(StandardCharsets.UTF_8)); 111 | 112 | } catch (Exception e) { 113 | throw new IllegalStateException(e); 114 | } 115 | } 116 | 117 | private String asRegex(String marker) { 118 | return marker.replaceAll("\\{", "\\\\{").replaceAll("\\}", "\\\\}"); 119 | } 120 | 121 | 122 | @UtilityClass 123 | public class HtmlTemplateProcessor { 124 | 125 | public static Map parseTemplateBlocks() { 126 | InputStream templateString = HtmlTemplateProcessor.class.getResourceAsStream(REPORT_TEMPLATE); 127 | if (isNull(templateString)) { 128 | throw new IllegalStateException("Report template is missing: " + REPORT_TEMPLATE); 129 | } 130 | 131 | Map contextBlocks = new HashMap<>(); 132 | Deque stack = new ArrayDeque<>(); 133 | 134 | StringBuilder root = new StringBuilder(); 135 | stack.push(root); 136 | contextBlocks.put("root", root); 137 | 138 | Set expectedBlocks = new HashSet<>(); 139 | expectedBlocks.add(OVERVIEW_MARKER); 140 | expectedBlocks.add(DETAILS_MARKER); 141 | expectedBlocks.add(PERCENTILE_TARGETS_MARKER); 142 | 143 | try (Scanner scanner = new Scanner(templateString)) { 144 | while (scanner.hasNext()) { 145 | String line = 
144 | while (scanner.hasNext()) { 145 | String line = scanner.nextLine(); 146 | String trimmed = line.trim(); 147 | 148 | if (expectedBlocks.contains(trimmed)) { 149 | // Keep the marker 150 | stack.getFirst().append(line).append("\n"); 151 | 152 | StringBuilder newBlock = new StringBuilder(); 153 | contextBlocks.put(trimmed, newBlock); 154 | stack.push(newBlock); 155 | } else if (trimmed.equals("{% END %}")) { 156 | stack.pop(); 157 | } else { 158 | stack.getFirst().append(line).append("\n"); 159 | } 160 | } 161 | } 162 | return contextBlocks.entrySet() 163 | .stream() 164 | .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toString())); 165 | } 166 | 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/providers/utils/ViewData.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers.utils; 2 | 3 | import com.github.noconnor.junitperf.data.EvaluationContext; 4 | import lombok.EqualsAndHashCode; 5 | import lombok.Getter; 6 | import lombok.Setter; 7 | import lombok.ToString; 8 | 9 | import java.util.Collections; 10 | import java.util.List; 11 | import java.util.stream.Collectors; 12 | import java.util.stream.IntStream; 13 | 14 | import static java.util.Objects.isNull; 15 | import static java.util.Objects.nonNull; 16 | 17 | @Getter 18 | public class ViewData { 19 | 20 | static final String SUCCESS_COLOUR = "#2b67a4"; 21 | static final String FAILED_COLOUR = "#d9534f"; 22 | static final String SKIPPED_COLOUR = "#dcdcdc"; 23 | 24 | @Getter 25 | @Setter 26 | @ToString 27 | @EqualsAndHashCode 28 | public static final class RequiredPercentilesData { 29 | private String percentile; 30 | private String percentileResultColour; 31 | private String percentileLatency; 32 | private String percentileTarget; 33 | } 34 | 35 | private final String testName; 36 | private final String uniqueId; 37 | private final String testNameColour; 38 | private final String chartData; 39 | private final String csvData; 40 | private final String startTime; 41 | private final String totalInvocations; 42 | private final String successfulInvocations; 43 | private final String errorThresholdColour; 44 | private final String errorCount; 45 | private final String errorPercentage; 46 | private final String configuredThreads; 47 | private final String configuredWarmUp; 48 | private final String configuredRampUpPeriodMs; 49 | private final String testDurationFormatted; 50 | private final String throughputAchievedColour; 51 | private final String throughputQps; 52 | private final String requiredThroughput; 53 | private final String minLatencyAchievedColour; 54 | private final String requiredMinLatency; 55 | private final String minLatency; 56 | private final String meanLatencyAchievedColour; 57 | private final String meanLatency; 58 | private final String requiredMeanLatency; 59 | private final String maxLatencyAchievedColour; 60 | private final String maxLatency; 61 | private final String requiredMaxLatency; 62 | private final List<RequiredPercentilesData> requiredPercentiles; 63 | 64 | public ViewData(EvaluationContext context) { 65 | this.testName = buildTestName(context); 66 | this.uniqueId = context.getUniqueId(); 67 | this.testNameColour = context.isAborted() ? SKIPPED_COLOUR : context.isSuccessful() ? SUCCESS_COLOUR : FAILED_COLOUR; 68 | this.chartData = buildChartData(context); 69 | this.csvData = buildCsvData(context); 70 | this.startTime = context.getStartTime(); 71 | this.totalInvocations = formatNumber(context.getEvaluationCount(), 0, ","); 72 | this.successfulInvocations = formatNumber(context.getEvaluationCount() - context.getErrorCount(), 0, ","); 73 | this.errorThresholdColour = context.isErrorThresholdAchieved() ? SUCCESS_COLOUR : FAILED_COLOUR; 74 | this.errorCount = formatNumber(context.getErrorCount(), 0, ","); 75 | this.errorPercentage = formatNumber(context.getErrorPercentage(), 2, ","); 76 | this.configuredThreads = String.valueOf(context.getConfiguredThreads()); 77 | this.configuredWarmUp = formatNumber(context.getConfiguredWarmUp(), 0, ","); 78 | this.configuredRampUpPeriodMs = formatNumber(context.getConfiguredRampUpPeriodMs(), 0, ","); 79 | this.testDurationFormatted = context.getTestDurationFormatted(); 80 | this.throughputAchievedColour = context.isThroughputAchieved() ? SUCCESS_COLOUR : FAILED_COLOUR; 81 | this.throughputQps = formatNumber(context.getThroughputQps(), 0, ","); 82 | this.requiredThroughput = formatNumber(context.getRequiredThroughput(), 0, ","); 83 | this.minLatencyAchievedColour = context.isMinLatencyAchieved() ? SUCCESS_COLOUR : FAILED_COLOUR; 84 | this.requiredMinLatency = (context.getRequiredMinLatency() < 0) ? "N/A" : formatNumber(context.getRequiredMinLatency(), 2, ""); 85 | this.minLatency = formatNumber(context.getMinLatencyMs(), 2, " "); 86 | this.meanLatencyAchievedColour = context.isMeanLatencyAchieved() ? SUCCESS_COLOUR : FAILED_COLOUR; 87 | this.meanLatency = formatNumber(context.getMeanLatencyMs(), 2, " "); 88 | this.requiredMeanLatency = (context.getRequiredMeanLatency() < 0) ? "N/A" : formatNumber(context.getRequiredMeanLatency(), 2, ""); 89 | this.maxLatencyAchievedColour = context.isMaxLatencyAchieved() ? SUCCESS_COLOUR : FAILED_COLOUR; 90 | this.maxLatency = formatNumber(context.getMaxLatencyMs(), 2, ","); 91 | this.requiredMaxLatency = (context.getRequiredMaxLatency() < 0) ? "N/A" : formatNumber(context.getRequiredMaxLatency(), 2, ""); 92 | this.requiredPercentiles = buildRequiredPercentileData(context); 93 | }
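// All fields above are pre-formatted display strings; ViewProcessor substitutes them into the HTML template via {{ context.<fieldName> }} placeholders.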
94 | 95 | private static String buildTestName(EvaluationContext context) { 96 | String baseName = nonNull(context.getGroupName()) ? context.getGroupName() + " : " + context.getTestName() : context.getTestName(); 97 | if (context.isAborted()){ 98 | baseName = baseName + " (skipped)"; 99 | } 100 | return baseName; 101 | } 102 | 103 | private List<RequiredPercentilesData> buildRequiredPercentileData(EvaluationContext context) { 104 | if (isNull(context.getPercentileResults())) { 105 | return Collections.emptyList(); 106 | } 107 | return context.getRequiredPercentiles().entrySet() 108 | .stream() 109 | .map(entry -> { 110 | Integer percentile = entry.getKey(); 111 | Float target = entry.getValue(); 112 | RequiredPercentilesData data = new RequiredPercentilesData(); 113 | data.percentile = percentile.toString(); 114 | data.percentileResultColour = context.getPercentileResults().get(percentile) ? 
SUCCESS_COLOUR : FAILED_COLOUR; 115 | data.percentileLatency = formatNumber(context.getLatencyPercentileMs(percentile), 2, ","); 116 | data.percentileTarget = formatNumber(target, 2, ","); 117 | return data; 118 | }).collect(Collectors.toList()); 119 | } 120 | 121 | private static String buildCsvData(EvaluationContext context) { 122 | return IntStream.range(1, 101).mapToObj(i -> "[ " + 123 | i + ", " + 124 | context.getLatencyPercentileMs(i) 125 | + " ]," 126 | ).collect(Collectors.joining("\n")); 127 | } 128 | 129 | private static String buildChartData(EvaluationContext context) { 130 | return IntStream.range(1, 100).mapToObj(i -> "[ " + 131 | i + ", " + 132 | context.getLatencyPercentileMs(i) + ", " + 133 | "\"" + i + "% of executions ≤ " + formatNumber(context.getLatencyPercentileMs(i), 2, ",") + "ms\"" 134 | + "]," 135 | ).collect(Collectors.joining("\n")); 136 | } 137 | 138 | private static String formatNumber(float value, int decimalPlaces, String thousandSeparator) { 139 | return String.format("%" + thousandSeparator + "." + decimalPlaces + "f", value).trim(); 140 | } 141 | 142 | } 143 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/providers/utils/ViewProcessor.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers.utils; 2 | 3 | import lombok.experimental.UtilityClass; 4 | 5 | import java.lang.reflect.Field; 6 | import java.util.List; 7 | 8 | @UtilityClass 9 | public class ViewProcessor { 10 | 11 | @SuppressWarnings("rawtypes") 12 | public static String populateTemplate(Object obj, String prefix, String template) throws IllegalAccessException { 13 | String temp = template; 14 | Field[] fields = obj.getClass().getDeclaredFields(); 15 | 16 | if (obj instanceof Iterable) { 17 | StringBuilder result = new StringBuilder(); 18 | for (Object data : (List) obj) { 19 | String tmp = populateTemplate(data, prefix, template); 20 | result.append(tmp).append("\n"); 21 | } 22 | temp = result.toString(); 23 | } else { 24 | for (Field f : fields) { 25 | f.setAccessible(true); 26 | String target = "\\{\\{ " + prefix + "." + f.getName() + " \\}\\}"; 27 | Object value = f.get(obj); 28 | temp = temp.replaceAll(target, String.valueOf(value)); 29 | } 30 | } 31 | return temp; 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/reporting/utils/FormatterUtils.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.utils; 2 | 3 | public class FormatterUtils { 4 | 5 | public static String format(float latency){ 6 | return latency < 0 ? 
"N/A" : Float.toString(latency); 7 | } 8 | 9 | } 10 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statements/EvaluationTask.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 4 | import com.google.common.util.concurrent.RateLimiter; 5 | import lombok.Builder; 6 | import lombok.SneakyThrows; 7 | import lombok.extern.slf4j.Slf4j; 8 | 9 | import java.util.function.Supplier; 10 | 11 | import static com.github.noconnor.junitperf.statements.ExceptionsRegistry.reThrowIfAbort; 12 | import static java.lang.System.nanoTime; 13 | import static java.util.Objects.isNull; 14 | import static java.util.Objects.nonNull; 15 | import static java.util.concurrent.TimeUnit.MILLISECONDS; 16 | import static java.util.concurrent.TimeUnit.NANOSECONDS; 17 | 18 | @Slf4j 19 | final class EvaluationTask implements Runnable { 20 | 21 | private final TestStatement statement; 22 | private final RateLimiter rateLimiter; 23 | private final Supplier terminator; 24 | private final StatisticsCalculator stats; 25 | private final long warmUpPeriodNs; 26 | private final long executionTarget; 27 | 28 | @Builder 29 | EvaluationTask(TestStatement statement, 30 | RateLimiter rateLimiter, 31 | StatisticsCalculator stats, 32 | Supplier terminator, 33 | int warmUpPeriodMs, 34 | int executionTarget) { 35 | this(statement, rateLimiter, terminator, stats, warmUpPeriodMs, executionTarget); 36 | } 37 | 38 | // Test only 39 | EvaluationTask(TestStatement statement, 40 | RateLimiter rateLimiter, 41 | Supplier terminator, 42 | StatisticsCalculator stats, 43 | int warmUpPeriodMs, 44 | int executionTarget) { 45 | this.statement = statement; 46 | this.rateLimiter = rateLimiter; 47 | this.terminator = terminator; 48 | this.stats = stats; 49 | this.warmUpPeriodNs = NANOSECONDS.convert(Math.max(warmUpPeriodMs, 0), MILLISECONDS); 50 | this.executionTarget = executionTarget; 51 | } 52 | 53 | @SneakyThrows 54 | @Override 55 | public void run() { 56 | long startTimeNs = nanoTime(); 57 | long startMeasurements = startTimeNs + warmUpPeriodNs; 58 | while (terminationFlagNotSet() && threadNotInterrupted() && executionTargetNotMet()) { 59 | waitForPermit(); 60 | evaluateStatement(startMeasurements); 61 | } 62 | } 63 | 64 | private boolean terminationFlagNotSet() { 65 | return !terminator.get(); 66 | } 67 | 68 | private static boolean threadNotInterrupted() { 69 | return !Thread.currentThread().isInterrupted(); 70 | } 71 | 72 | private boolean executionTargetNotMet() { 73 | return executionTarget <= 0 || stats.getEvaluationCount() < executionTarget; 74 | } 75 | 76 | private void evaluateStatement(long startMeasurements) throws Throwable { 77 | if (nanoTime() < startMeasurements) { 78 | try { 79 | statement.runBefores(); 80 | statement.evaluate(); 81 | statement.runAfters(); 82 | } catch (InterruptedException e) { 83 | Thread.currentThread().interrupt(); 84 | } catch (Throwable throwable) { 85 | log.trace("Warmup error", throwable); 86 | } 87 | } else { 88 | 89 | try { 90 | statement.runBefores(); 91 | } catch (InterruptedException e) { 92 | Thread.currentThread().interrupt(); 93 | } catch (Throwable throwable) { 94 | log.trace("Setup error", throwable); 95 | reThrowIfAbort(throwable); 96 | if (isTerminalException(throwable)) { 97 | throw new IllegalStateException("Before method failed", throwable); 
88 | 89 | try { 90 | statement.runBefores(); 91 | } catch (InterruptedException e) { 92 | Thread.currentThread().interrupt(); 93 | } catch (Throwable throwable) { 94 | log.trace("Setup error", throwable); 95 | reThrowIfAbort(throwable); 96 | if (isTerminalException(throwable)) { 97 | throw new IllegalStateException("Before method failed", throwable); 98 | } 99 | } 100 | 101 | long startTimeNs = nanoTime(); 102 | try { 103 | statement.evaluate(); 104 | stats.addLatencyMeasurement(nanoTime() - startTimeNs); 105 | stats.incrementEvaluationCount(); 106 | } catch (InterruptedException e) { 107 | Thread.currentThread().interrupt(); 108 | } catch (Throwable throwable) { 109 | log.trace("Execution error", throwable); 110 | reThrowIfAbort(throwable); 111 | checkForIgnorable(throwable); 112 | stats.addLatencyMeasurement(nanoTime() - startTimeNs); 113 | } 114 | 115 | try { 116 | statement.runAfters(); 117 | } catch (InterruptedException e) { 118 | Thread.currentThread().interrupt(); 119 | } catch (Throwable throwable) { 120 | log.trace("Teardown error", throwable); 121 | reThrowIfAbort(throwable); 122 | if (isTerminalException(throwable)) { 123 | throw new IllegalStateException("After method failed", throwable); 124 | } 125 | } 126 | 127 | } 128 | } 129 | 130 | private void checkForIgnorable(Throwable throwable) { 131 | if (isIgnorableException(throwable)) { 132 | stats.incrementEvaluationCount(); 133 | } else { 134 | stats.incrementEvaluationCount(); 135 | stats.incrementErrorCount(); 136 | } 137 | } 138 | 139 | private boolean isTerminalException(Throwable throwable) { 140 | return !isIgnorableException(throwable); 141 | } 142 | 143 | private boolean isIgnorableException(Throwable throwable) { 144 | return isIgnorable(throwable); 145 | } 146 | 147 | private boolean isIgnorable(Throwable throwable) { 148 | if (isNull(throwable)) { 149 | return false; 150 | } 151 | return ExceptionsRegistry.isIgnorable(throwable) || isIgnorable(throwable.getCause()); 152 | } 153 | 154 | private void waitForPermit() { 155 | if (nonNull(rateLimiter)) { 156 | rateLimiter.acquire(); 157 | } 158 | } 159 | 160 | } 161 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statements/ExceptionsRegistry.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | import lombok.experimental.UtilityClass; 4 | 5 | import java.lang.reflect.InvocationTargetException; 6 | import java.util.Collections; 7 | import java.util.HashSet; 8 | import java.util.Set; 9 | 10 | @UtilityClass 11 | public class ExceptionsRegistry { 12 | 13 | private static final Set<Class<? extends Throwable>> IGNORABLE_EXCEPTIONS_REGISTRY = new HashSet<>(); 14 | private static final Set<Class<? extends Throwable>> ABORT_EXCEPTIONS_REGISTRY = new HashSet<>(); 15 | 16 | public static void registerIgnorable(Class<? extends Throwable> exception) { 17 | IGNORABLE_EXCEPTIONS_REGISTRY.add(exception); 18 | } 19 | 20 | public static void registerAbort(Class<? extends Throwable> exception) { 21 | ABORT_EXCEPTIONS_REGISTRY.add(exception); 22 | }
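// Typical registration (mirrored in ExceptionsRegistryTest): registerIgnorable(InterruptedException.class) to count an exception without failing, registerAbort(AssumptionViolatedException.class) to re-throw it and mark the test as skipped.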
23 | 24 | public static boolean isIgnorable(Throwable throwable) { 25 | return IGNORABLE_EXCEPTIONS_REGISTRY.contains(throwable.getClass()); 26 | } 27 | 28 | public static void reThrowIfAbort(Throwable throwable) throws Throwable { 29 | Throwable targetException = throwable; 30 | if (throwable instanceof InvocationTargetException) { 31 | targetException = ((InvocationTargetException) throwable).getTargetException(); 32 | } 33 | if (ABORT_EXCEPTIONS_REGISTRY.contains(targetException.getClass())) { 34 | // re-throw abortable exceptions 35 | throw targetException; 36 | } 37 | } 38 | 39 | public static Set<Class<? extends Throwable>> ignorables() { 40 | return Collections.unmodifiableSet(IGNORABLE_EXCEPTIONS_REGISTRY); 41 | } 42 | 43 | public static Set<Class<? extends Throwable>> abortables() { 44 | return Collections.unmodifiableSet(ABORT_EXCEPTIONS_REGISTRY); 45 | } 46 | 47 | static void clearRegistry() { 48 | IGNORABLE_EXCEPTIONS_REGISTRY.clear(); 49 | ABORT_EXCEPTIONS_REGISTRY.clear(); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statements/PerformanceEvaluationStatement.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | import com.github.noconnor.junitperf.data.EvaluationContext; 4 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 5 | import com.github.noconnor.junitperf.statistics.providers.NoOpStatisticsCollector; 6 | import com.google.common.util.concurrent.RateLimiter; 7 | import com.google.common.util.concurrent.ThreadFactoryBuilder; 8 | import lombok.Builder; 9 | 10 | import java.util.List; 11 | import java.util.concurrent.CountDownLatch; 12 | import java.util.concurrent.ThreadFactory; 13 | import java.util.concurrent.atomic.AtomicBoolean; 14 | import java.util.function.Consumer; 15 | 16 | import static com.google.common.collect.Lists.newArrayList; 17 | import static com.google.common.util.concurrent.RateLimiter.create; 18 | import static java.lang.String.format; 19 | import static java.lang.System.nanoTime; 20 | import static java.util.Objects.nonNull; 21 | import static java.util.concurrent.TimeUnit.MILLISECONDS; 22 | 23 | public class PerformanceEvaluationStatement { 24 | 25 | private static final String THREAD_NAME_PATTERN = "perf-eval-thread-%d"; 26 | private static final ThreadFactory FACTORY = new ThreadFactoryBuilder().setNameFormat(THREAD_NAME_PATTERN).build(); 27 | 28 | private final EvaluationContext context; 29 | private final ThreadFactory threadFactory; 30 | private final TestStatement baseStatement; 31 | private final StatisticsCalculator statistics; 32 | private final Consumer<Void> listener; 33 | 34 | private RateLimiter rateLimiter; 35 | 36 | @Builder 37 | private PerformanceEvaluationStatement(TestStatement baseStatement, 38 | StatisticsCalculator statistics, 39 | EvaluationContext context, 40 | ThreadFactory threadFactory, 41 | Consumer<Void> listener) { 42 | this.context = context; 43 | this.baseStatement = baseStatement; 44 | this.statistics = statistics; 45 | this.threadFactory = nonNull(threadFactory) ? threadFactory : FACTORY; 46 | this.rateLimiter = context.getConfiguredRateLimit() > 0 ? createRateLimiter(context) : null; 47 | this.listener = listener; 48 | }
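// A RateLimiter is created only when a positive rate limit is configured; when a ramp-up period is also set, createRateLimiter (below) uses Guava's warm-up factory so throughput grows gradually over the ramp-up window.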
49 | 50 | public void runParallelEvaluation() throws Throwable { 51 | statistics.reset(); 52 | List<Thread> threads = newArrayList(); 53 | AtomicBoolean stopSignal = new AtomicBoolean(); 54 | CountDownLatch latch = new CountDownLatch(context.getConfiguredThreads()); 55 | 56 | try { 57 | 58 | for (int i = 0; i < context.getConfiguredThreads(); i++) { 59 | Thread t = threadFactory.newThread(createTask(stopSignal, latch)); 60 | threads.add(t); 61 | t.start(); 62 | } 63 | 64 | //noinspection ResultOfMethodCallIgnored 65 | latch.await(context.getConfiguredDuration(), MILLISECONDS); 66 | 67 | } finally { 68 | stopSignal.set(true); 69 | threads.forEach(Thread::interrupt); 70 | } 71 | if (context.isAborted()) { 72 | listener.accept(null); 73 | throw context.getAbortedException(); 74 | } 75 | context.setFinishTimeNs(nanoTime()); 76 | context.setStatistics(statistics); 77 | context.runValidation(); 78 | listener.accept(null); 79 | assertThresholdsMet(); 80 | } 81 | 82 | private Runnable createTask(AtomicBoolean stopSignal, CountDownLatch latch) { 83 | StatisticsCalculator stats = context.isAsyncEvaluation() ? NoOpStatisticsCollector.INSTANCE : statistics; 84 | return () -> { 85 | try { 86 | EvaluationTask.builder() 87 | .statement(baseStatement) 88 | .rateLimiter(rateLimiter) 89 | .stats(stats) 90 | .terminator(stopSignal::get) 91 | .warmUpPeriodMs(context.getConfiguredWarmUp()) 92 | .executionTarget(context.getConfiguredExecutionTarget()) 93 | .build() 94 | .run(); 95 | } catch (Throwable t) { 96 | context.setAbortedException(t); 97 | } finally { 98 | latch.countDown(); 99 | } 100 | }; 101 | } 102 | 103 | private void assertThresholdsMet() { 104 | assertThat("Error threshold not achieved", context.isErrorThresholdAchieved(), true); 105 | assertThat("Test throughput threshold not achieved", context.isThroughputAchieved(), true); 106 | assertThat("Test min latency threshold not achieved", context.isMinLatencyAchieved(), true); 107 | assertThat("Test max latency threshold not achieved", context.isMaxLatencyAchieved(), true); 108 | assertThat("Test mean latency threshold not achieved", context.isMeanLatencyAchieved(), true); 109 | context.getPercentileResults().forEach((percentile, isAchieved) -> { 110 | assertThat(format("%dth Percentile has not achieved required threshold", percentile), isAchieved, true); 111 | }); 112 | } 113 | 114 | private RateLimiter createRateLimiter(final EvaluationContext context) { 115 | int rampUp = context.getConfiguredRampUpPeriodMs(); 116 | int rateLimit = context.getConfiguredRateLimit(); 117 | return rampUp > 0 ? 
create(rateLimit, rampUp, MILLISECONDS) : create(rateLimit); 118 | } 119 | 120 | private void assertThat(String message, boolean actual, boolean expected){ 121 | if (actual != expected){ 122 | throw new AssertionError(message); 123 | } 124 | } 125 | 126 | 127 | } 128 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statements/SimpleTestStatement.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | public interface SimpleTestStatement extends TestStatement { 4 | 5 | @Override 6 | default void runBefores() { 7 | // Do nothing 8 | } 9 | 10 | @Override 11 | default void runAfters() { 12 | // Do nothing 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statements/TestStatement.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | public interface TestStatement { 4 | 5 | void runBefores() throws Throwable; 6 | 7 | void evaluate() throws Throwable; 8 | 9 | void runAfters() throws Throwable; 10 | } 11 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statistics/StatisticsCalculator.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statistics; 2 | 3 | import java.util.concurrent.TimeUnit; 4 | 5 | public interface StatisticsCalculator { 6 | 7 | void addLatencyMeasurement(long executionTimeNs); 8 | 9 | void incrementErrorCount(); 10 | 11 | void incrementEvaluationCount(); 12 | 13 | long getErrorCount(); 14 | 15 | long getEvaluationCount(); 16 | 17 | float getLatencyPercentile(int percentile, TimeUnit unit); 18 | 19 | float getMaxLatency(TimeUnit unit); 20 | 21 | float getMinLatency(TimeUnit unit); 22 | 23 | float getMeanLatency(TimeUnit unit); 24 | 25 | float getErrorPercentage(); 26 | 27 | void reset(); 28 | 29 | } 30 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statistics/providers/DescriptiveStatisticsCalculator.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statistics.providers; 2 | 3 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 4 | import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; 5 | import org.apache.commons.math3.stat.descriptive.SynchronizedDescriptiveStatistics; 6 | 7 | import java.util.concurrent.TimeUnit; 8 | import java.util.concurrent.atomic.AtomicLong; 9 | 10 | public class DescriptiveStatisticsCalculator implements StatisticsCalculator { 11 | 12 | private final DescriptiveStatistics statistics; 13 | private final AtomicLong errorCount = new AtomicLong(); 14 | private final AtomicLong evaluationCount = new AtomicLong(); 15 | 16 | // http://commons.apache.org/proper/commons-math/userguide/stat.html#a1.2_Descriptive_statistics 17 | public DescriptiveStatisticsCalculator() { 18 | this(new SynchronizedDescriptiveStatistics()); 19 | } 20 | 21 | public DescriptiveStatisticsCalculator(int windowSize) { 22 | this(new SynchronizedDescriptiveStatistics(windowSize)); 23 | } 24 | 25 | public 
DescriptiveStatisticsCalculator(DescriptiveStatistics statistics) { 26 | this.statistics = statistics; 27 | } 28 | 29 | @Override 30 | public void addLatencyMeasurement(long executionTimeNs) { 31 | statistics.addValue(executionTimeNs); 32 | } 33 | 34 | @Override 35 | public void incrementErrorCount() { 36 | errorCount.incrementAndGet(); 37 | } 38 | 39 | @Override 40 | public void incrementEvaluationCount() { 41 | evaluationCount.incrementAndGet(); 42 | } 43 | 44 | @Override 45 | public long getErrorCount() { 46 | return errorCount.get(); 47 | } 48 | 49 | @Override 50 | public long getEvaluationCount() { 51 | return evaluationCount.get(); 52 | } 53 | 54 | @Override 55 | public float getLatencyPercentile(int percentile, TimeUnit unit) { 56 | float value = (float) statistics.getPercentile((double) (percentile)); 57 | return value > 0 ? value / unit.toNanos(1) : 0; 58 | } 59 | 60 | @Override 61 | public float getMaxLatency(TimeUnit unit) { 62 | float max = (float) statistics.getMax(); 63 | return max > 0 ? max / unit.toNanos(1) : 0; 64 | } 65 | 66 | @Override 67 | public float getMinLatency(TimeUnit unit) { 68 | float min = (float) statistics.getMin(); 69 | return min > 0 ? min / unit.toNanos(1) : 0; 70 | } 71 | 72 | @Override 73 | public float getMeanLatency(TimeUnit unit) { 74 | float mean = (float) statistics.getMean(); 75 | return mean > 0 ? mean / unit.toNanos(1) : 0; 76 | } 77 | 78 | @Override 79 | public float getErrorPercentage() { 80 | float evalCount = evaluationCount.get(); 81 | float errCount = errorCount.get(); 82 | return evalCount > 0 ? (errCount / evalCount) * 100 : 0; 83 | } 84 | 85 | @Override 86 | public void reset() { 87 | statistics.clear(); 88 | evaluationCount.set(0); 89 | errorCount.set(0); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /junitperf-core/src/main/java/com/github/noconnor/junitperf/statistics/providers/NoOpStatisticsCollector.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statistics.providers; 2 | 3 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 4 | import java.util.concurrent.TimeUnit; 5 | 6 | public class NoOpStatisticsCollector implements StatisticsCalculator { 7 | 8 | public static final NoOpStatisticsCollector INSTANCE = new NoOpStatisticsCollector(); 9 | 10 | @Override 11 | public void addLatencyMeasurement(long executionTimeNs) { 12 | } 13 | 14 | @Override 15 | public void incrementErrorCount() { 16 | } 17 | 18 | @Override 19 | public void incrementEvaluationCount() { 20 | } 21 | 22 | @Override 23 | public long getErrorCount() { 24 | return 0; 25 | } 26 | 27 | @Override 28 | public long getEvaluationCount() { 29 | return 0; 30 | } 31 | 32 | @Override 33 | public float getLatencyPercentile(int percentile, TimeUnit unit) { 34 | return 0; 35 | } 36 | 37 | @Override 38 | public float getMaxLatency(TimeUnit unit) { 39 | return 0; 40 | } 41 | 42 | @Override 43 | public float getMinLatency(TimeUnit unit) { 44 | return 0; 45 | } 46 | 47 | @Override 48 | public float getMeanLatency(TimeUnit unit) { 49 | return 0; 50 | } 51 | 52 | @Override 53 | public float getErrorPercentage() { 54 | return 0; 55 | } 56 | 57 | @Override 58 | public void reset() { 59 | 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/BaseTest.java: 
-------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf; 2 | 3 | import org.junit.Ignore; 4 | import org.mockito.MockitoAnnotations; 5 | 6 | @Ignore 7 | public class BaseTest { 8 | 9 | public BaseTest() { 10 | MockitoAnnotations.initMocks(this); 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/data/NoOpTestContextTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.data; 2 | 3 | import org.junit.Before; 4 | import org.junit.Test; 5 | 6 | public class NoOpTestContextTest { 7 | 8 | private NoOpTestContext context; 9 | 10 | @Before 11 | public void setup() { 12 | context = new NoOpTestContext(); 13 | } 14 | 15 | @Test 16 | public void whenCallingSuccess_thenNoExceptionsShouldBeThrown() { 17 | context.success(); 18 | } 19 | 20 | @Test 21 | public void whenCallingFail_thenNoExceptionsShouldBeThrown() { 22 | context.fail(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/data/TestContextTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.data; 2 | 3 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 4 | import org.junit.Before; 5 | import org.junit.Test; 6 | import org.junit.runner.RunWith; 7 | import org.mockito.ArgumentCaptor; 8 | import org.mockito.Mock; 9 | import org.mockito.junit.MockitoJUnitRunner; 10 | 11 | import static java.lang.Thread.sleep; 12 | import static org.hamcrest.CoreMatchers.is; 13 | import static org.hamcrest.MatcherAssert.assertThat; 14 | import static org.hamcrest.Matchers.greaterThanOrEqualTo; 15 | import static org.mockito.Mockito.verify; 16 | 17 | @RunWith(MockitoJUnitRunner.StrictStubs.class) 18 | public class TestContextTest { 19 | 20 | @Mock 21 | private StatisticsCalculator calculatorMock; 22 | 23 | private TestContext context; 24 | 25 | @Before 26 | public void setup(){ 27 | context = new TestContext(calculatorMock); 28 | } 29 | 30 | @Test 31 | public void whenSuccessIsCalled_thenStatsEvaluationCountShouldBeUpdated(){ 32 | context.success(); 33 | verify(calculatorMock).incrementEvaluationCount(); 34 | } 35 | 36 | @Test 37 | public void whenSuccessIsCalled_thenStatsAddLatencyMeasurementShouldBeUpdatedWithTheCorrectTime() throws InterruptedException { 38 | sleep(10); 39 | context.success(); 40 | ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(Long.class); 41 | verify(calculatorMock).addLatencyMeasurement(captor.capture()); 42 | assertThat(captor.getValue(), is(greaterThanOrEqualTo(10_000_000L))); // latency is captured in nanoseconds; a 10 ms sleep is at least 10,000,000 ns 43 | } 44 | 45 | @Test 46 | public void whenFailIsCalled_thenStatsEvaluationCountShouldBeUpdated(){ 47 | context.fail(); 48 | verify(calculatorMock).incrementEvaluationCount(); 49 | } 50 | 51 | @Test 52 | public void whenFailIsCalled_thenStatsAddLatencyMeasurementShouldBeUpdatedWithTheCorrectTime() throws InterruptedException { 53 | sleep(5); 54 | context.fail(); 55 | ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(Long.class); 56 | verify(calculatorMock).addLatencyMeasurement(captor.capture()); 57 | assertThat(captor.getValue(), is(greaterThanOrEqualTo(5_000_000L))); // 5 ms in nanoseconds 58 | } 59 | 60 | @Test 61 | public void whenFailIsCalled_thenStatsEvaluationErrorCountShouldBeUpdated(){ 62 | context.fail();
63 | verify(calculatorMock).incrementErrorCount(); 64 | } 65 | 66 | } 67 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/datetime/DatetimeUtilsTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.datetime; 2 | 3 | import static org.junit.Assert.assertEquals; 4 | import static org.junit.Assert.assertTrue; 5 | 6 | import org.junit.Test; 7 | 8 | public class DatetimeUtilsTest { 9 | 10 | @Test 11 | public void whenTimeHasBeenOverridden_thenTheOverriddenTimeShouldBeReturned() { 12 | DatetimeUtils.setOverride("TEST"); 13 | assertEquals("TEST", DatetimeUtils.now()); 14 | } 15 | 16 | @Test 17 | public void whenRetrievingNowTime_thenFormattedStringShouldBeReturned() { 18 | DatetimeUtils.setOverride(null); 19 | String expectedPattern = "\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}"; 20 | assertTrue(DatetimeUtils.now().matches(expectedPattern)); 21 | } 22 | 23 | @Test 24 | public void whenDurationIsValid_thenDurationShouldBeFormatted() { 25 | assertEquals("1d:3h:46m:40s", DatetimeUtils.format(100_000_000)); 26 | assertEquals("2h:46m:40s", DatetimeUtils.format(10_000_000)); 27 | assertEquals("16m:40s", DatetimeUtils.format(1_000_000)); 28 | assertEquals("59s", DatetimeUtils.format(59_000)); 29 | assertEquals("300ms", DatetimeUtils.format(300)); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/reporting/providers/ConsoleReportGeneratorTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers; 2 | 3 | import java.util.LinkedHashSet; 4 | import org.junit.Before; 5 | import org.junit.Test; 6 | import com.github.noconnor.junitperf.data.EvaluationContext; 7 | import com.github.noconnor.junitperf.reporting.BaseReportGeneratorTest; 8 | 9 | import static org.junit.Assert.assertNull; 10 | 11 | public class ConsoleReportGeneratorTest extends BaseReportGeneratorTest { 12 | 13 | private ConsoleReportGenerator reportGenerator; 14 | 15 | @Before 16 | public void setup() { 17 | reportGenerator = new ConsoleReportGenerator(); 18 | initialisePerfTestAnnotationMock(); 19 | initialisePerfTestRequirementAnnotationMock(); 20 | } 21 | 22 | @Test 23 | public void whenGeneratingAReport_andAllTestsFailed_thenAppropriateReportShouldBeGenerated() { 24 | reportGenerator.generateReport(generateAllFailureOrderedContexts()); 25 | } 26 | 27 | @Test 28 | public void whenGeneratingAReport_andAllTestsPass_thenAppropriateReportShouldBeGenerated() { 29 | reportGenerator.generateReport(generateAllPassedOrderedContexts()); 30 | } 31 | 32 | @Test 33 | public void whenGeneratingAReport_andTestsContainsAMixOfPassAndFailures_thenAppropriateReportShouldBeGenerated() { 34 | reportGenerator.generateReport(generateMixedOrderedContexts()); 35 | } 36 | 37 | @Test 38 | public void whenGeneratingAReport_andTestsContainsSomeFailures_thenAppropriateReportShouldBeGenerated() { 39 | reportGenerator.generateReport(generateSomeFailuresContext()); 40 | } 41 | 42 | @Test 43 | public void whenGeneratingAReport_andTestsContainsSomeAborts_thenAppropriateReportShouldBeGenerated() { 44 | LinkedHashSet<EvaluationContext> contexts = new LinkedHashSet<>(); 45 | contexts.add(createdAbortedEvaluationContext("unittest1")); 46 | reportGenerator.generateReport(contexts); 47 | }
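// These console tests exercise the logging paths end-to-end; they pass as long as no exception is thrown (the rendered output is inspected manually).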
48 | 49 | @Test 50 | public void whenGeneratingAReport_andGenerateIsCalledMultipleTimes_thenOnlyNewResultsShouldBePrinted() { 51 | LinkedHashSet<EvaluationContext> contexts = generateSomeFailuresContext(); 52 | reportGenerator.generateReport(contexts); 53 | reportGenerator.generateReport(contexts); 54 | } 55 | 56 | @Test 57 | public void whenCallingGetReportPath_thenNullShouldBeReturned() { 58 | assertNull(reportGenerator.getReportPath()); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/reporting/providers/CsvReportGeneratorTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.util.Locale; 6 | 7 | import org.junit.After; 8 | import org.junit.Before; 9 | import org.junit.Test; 10 | import com.github.noconnor.junitperf.reporting.BaseReportGeneratorTest; 11 | 12 | import static java.lang.System.getProperty; 13 | import static org.hamcrest.Matchers.is; 14 | import static org.junit.Assert.assertEquals; 15 | import static org.junit.Assert.assertThat; 16 | 17 | public class CsvReportGeneratorTest extends BaseReportGeneratorTest { 18 | 19 | private Locale defaultLocale; 20 | 21 | private CsvReportGenerator reportGenerator; 22 | 23 | private File reportFile; 24 | 25 | @Before 26 | public void setup() throws IOException { 27 | reportFile = folder.newFile("report.csv"); 28 | reportGenerator = new CsvReportGenerator(reportFile.getPath()); 29 | initialisePerfTestAnnotationMock(); 30 | initialisePerfTestRequirementAnnotationMock(); 31 | defaultLocale = Locale.getDefault(); 32 | // set locale to en-US as this test expects numbers to use "." 33 | // as a decimal separator and "," as a grouping separator, e.g. 
1,337.42 34 | Locale.setDefault(Locale.US); 35 | } 36 | 37 | @After 38 | public void after() { 39 | // restore default locale 40 | Locale.setDefault(defaultLocale); 41 | } 42 | 43 | @Test 44 | public void whenCallingDefaultConstructor_thenNoExceptionShouldBeThrown() throws IOException { 45 | reportGenerator = new CsvReportGenerator(); 46 | } 47 | 48 | @Test(expected = IllegalStateException.class) 49 | public void whenGeneratingAReport_andPathIsNotWritable_thenExceptionShouldBeThrown() throws IOException { 50 | reportGenerator = new CsvReportGenerator("///foo"); 51 | reportGenerator.generateReport(generateAllFailureOrderedContexts()); 52 | } 53 | 54 | @Test 55 | public void whenGeneratingAReport_andAllTestsFailed_thenAppropriateReportShouldBeGenerated() throws IOException { 56 | reportGenerator.generateReport(generateAllFailureOrderedContexts()); 57 | File expectedContents = getResourceFile("csv/failed.csv"); 58 | assertThat(readFileContents(reportFile), is(readFileContents(expectedContents))); 59 | } 60 | 61 | @Test 62 | public void whenGeneratingAReport_andAllTestsPass_thenAppropriateReportShouldBeGenerated() throws IOException { 63 | reportGenerator.generateReport(generateAllPassedOrderedContexts()); 64 | File expectedContents = getResourceFile("csv/passed.csv"); 65 | assertThat(readFileContents(reportFile), is(readFileContents(expectedContents))); 66 | } 67 | 68 | @Test 69 | public void whenGeneratingAReport_andTestsContainsAMixOfPassAndFailures_thenAppropriateReportShouldBeGenerated() throws IOException { 70 | reportGenerator.generateReport(generateMixedOrderedContexts()); 71 | File expectedContents = getResourceFile("csv/mix.csv"); 72 | assertThat(readFileContents(reportFile), is(readFileContents(expectedContents))); 73 | } 74 | 75 | @Test 76 | public void whenGeneratingAReport_andTestsContainsSomeFailures_thenAppropriateReportShouldBeGenerated() throws IOException { 77 | reportGenerator.generateReport(generateSomeFailuresContext()); 78 | File expectedContents = getResourceFile("csv/some_failures.csv"); 79 | assertThat(readFileContents(reportFile), is(readFileContents(expectedContents))); 80 | } 81 | 82 | @Test 83 | public void whenGeneratingAReport_andTestsContainsSomeAbortsAndFailures_thenAppropriateReportShouldBeGenerated() throws IOException { 84 | reportGenerator.generateReport(generateAbortedFailedAndSuccessContexts()); 85 | File expectedContents = getResourceFile("csv/fail_abort_succeed.csv"); 86 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 87 | } 88 | 89 | @Test 90 | public void whenGeneratingAReport_andTestsContainsSomeAbortsAndFailures_andGenerateReportIsCalledMultipleTimes_thenAppropriateReportShouldBeGenerated() throws IOException { 91 | reportGenerator.generateReport(generateAbortedFailedAndSuccessContexts()); 92 | reportGenerator.generateReport(generateAbortedFailedAndSuccessContexts()); 93 | File expectedContents = getResourceFile("csv/fail_abort_succeed.csv"); 94 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 95 | } 96 | 97 | @Test 98 | public void whenCallingGetReportPath_andCustomPathHasBeenSpecified_thenCorrectPathShouldBeReturned() { 99 | assertThat(reportGenerator.getReportPath(), is(reportFile.getPath())); 100 | } 101 | 102 | @Test 103 | public void whenCallingGetReportPath_andDefaultPathHasBeenSpecified_thenCorrectPathShouldBeReturned() { 104 | reportGenerator = new CsvReportGenerator(); 105 | assertThat(reportGenerator.getReportPath(), is(getProperty("user.dir") + 
"/build/reports/junitperf_report.csv")); 106 | } 107 | 108 | } 109 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/reporting/providers/HtmlReportGeneratorTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers; 2 | 3 | import com.github.noconnor.junitperf.datetime.DatetimeUtils; 4 | import com.github.noconnor.junitperf.reporting.BaseReportGeneratorTest; 5 | import org.junit.After; 6 | import org.junit.Before; 7 | import org.junit.Test; 8 | 9 | import java.io.File; 10 | import java.io.IOException; 11 | import java.util.Locale; 12 | import java.util.Map; 13 | 14 | import static java.lang.System.getProperty; 15 | import static org.hamcrest.Matchers.is; 16 | import static org.junit.Assert.assertEquals; 17 | import static org.junit.Assert.assertThat; 18 | import static org.junit.Assert.assertTrue; 19 | 20 | public class HtmlReportGeneratorTest extends BaseReportGeneratorTest { 21 | 22 | private Locale defaultLocale; 23 | 24 | private HtmlReportGenerator reportGenerator; 25 | 26 | @Before 27 | public void setup() throws IOException { 28 | reportFile = folder.newFile("report.html"); 29 | reportGenerator = new HtmlReportGenerator(reportFile.getPath()); 30 | DatetimeUtils.setOverride("unittest o'clock"); 31 | initialisePerfTestAnnotationMock(); 32 | initialisePerfTestRequirementAnnotationMock(); 33 | defaultLocale = Locale.getDefault(); 34 | // set local to en-US as this test expects numbers to use "." 35 | // as a decimal separator and "," as a grouping separator, e.g. 1,337.42 36 | Locale.setDefault(Locale.US); 37 | } 38 | 39 | @After 40 | public void after() { 41 | // restore default locale 42 | Locale.setDefault(defaultLocale); 43 | } 44 | 45 | @Test 46 | public void whenCallingDefaultConstructor_thenNoExceptionShouldBeThrown() throws IOException { 47 | reportGenerator = new HtmlReportGenerator(); 48 | } 49 | 50 | @Test(expected = IllegalStateException.class) 51 | public void whenGeneratingAReport_andPathIsNotWritable_thenExceptionShouldBeThrown() throws IOException { 52 | reportGenerator = new HtmlReportGenerator("///foo"); 53 | reportGenerator.generateReport(generateAllFailureOrderedContexts()); 54 | } 55 | 56 | @Test 57 | public void whenGeneratingAReport_andAllTestsFailed_thenAppropriateReportShouldBeGenerated() throws IOException { 58 | reportGenerator.generateReport(generateAllFailureOrderedContexts()); 59 | File expectedContents = getResourceFile("html/example_all_failed_report.html"); 60 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 61 | } 62 | 63 | @Test 64 | public void whenGeneratingAReport_andAllTestsPass_thenAppropriateReportShouldBeGenerated() throws IOException { 65 | reportGenerator.generateReport(generateAllPassedOrderedContexts()); 66 | File expectedContents = getResourceFile("html/example_all_passed_report.html"); 67 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 68 | } 69 | 70 | @Test 71 | public void whenGeneratingAReport_andTestsContainsAMixOfPassAndFailures_thenAppropriateReportShouldBeGenerated() throws IOException { 72 | reportGenerator.generateReport(generateMixedOrderedContexts()); 73 | File expectedContents = getResourceFile("html/example_mixed_report.html"); 74 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 75 | } 76 | 77 | @Test 78 | public void 
whenGeneratingAReport_andTestsContainsSomeFailures_thenAppropriateReportShouldBeGenerated() throws IOException { 79 | reportGenerator.generateReport(generateSomeFailuresContext()); 80 | File expectedContents = getResourceFile("html/example_some_failures_report.html"); 81 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 82 | } 83 | 84 | @Test 85 | public void whenGeneratingAReport_andTestsContainsSomeAbortsAndFailures_thenAppropriateReportShouldBeGenerated() throws IOException { 86 | reportGenerator.generateReport(generateAbortedFailedAndSuccessContexts()); 87 | File expectedContents = getResourceFile("html/example_aborted_failed_success.html"); 88 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 89 | } 90 | 91 | @Test 92 | public void whenGeneratingAReport_andTestsContainsSomeAbortsAndFailures_andGenerateIsCalledMultipleTimes_thenAppropriateReportShouldBeGenerated() throws IOException { 93 | reportGenerator.generateReport(generateAbortedFailedAndSuccessContexts()); 94 | reportGenerator.generateReport(generateAbortedFailedAndSuccessContexts()); 95 | File expectedContents = getResourceFile("html/example_aborted_failed_success.html"); 96 | assertEquals(readFileContents(expectedContents), readFileContents(reportFile)); 97 | } 98 | 99 | @Test 100 | public void whenCallingGetReportPath_andCustomPathHasBeenSpecified_thenCorrectPathShouldBeReturned() { 101 | assertThat(reportGenerator.getReportPath(), is(reportFile.getPath())); 102 | } 103 | 104 | @Test 105 | public void whenCallingGetReportPath_andDefaultPathHasBeenSpecified_thenCorrectPathShouldBeReturned() { 106 | reportGenerator = new HtmlReportGenerator(); 107 | String expected = String.join( 108 | File.separator, 109 | getProperty("user.dir") , 110 | "build", 111 | "reports", 112 | "junitperf_report.html" 113 | ); 114 | assertEquals(expected, reportGenerator.getReportPath()); 115 | } 116 | 117 | @Test 118 | public void whenHtmlProcessorProcessBlocksIsCalled_thenTheCorrectBlocksShouldBeProcessed() { 119 | Map blocks = HtmlReportGenerator.HtmlTemplateProcessor.parseTemplateBlocks(); 120 | assertEquals(4, blocks.size()); 121 | assertTrue(blocks.containsKey("root")); 122 | assertTrue(blocks.containsKey("{% OVERVIEW_BLOCK %}")); 123 | assertTrue(blocks.containsKey("{% DETAILED_BLOCK %}")); 124 | assertTrue(blocks.containsKey("{% PERCENTILES_BLOCK %}")); 125 | 126 | assertEquals(918, blocks.get("root").length()); 127 | assertEquals(296, blocks.get("{% OVERVIEW_BLOCK %}").length()); 128 | assertEquals(7884, blocks.get("{% DETAILED_BLOCK %}").length()); 129 | assertEquals(704, blocks.get("{% PERCENTILES_BLOCK %}").length()); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/reporting/providers/utils/ViewProcessorTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.providers.utils; 2 | 3 | import org.junit.Test; 4 | 5 | import java.util.ArrayList; 6 | import java.util.List; 7 | 8 | import static org.junit.Assert.assertEquals; 9 | 10 | public class ViewProcessorTest { 11 | 12 | 13 | @Test 14 | public void templateShouldBePopulatedWithObjectData() throws IllegalAccessException { 15 | Data data = new Data(); 16 | data.name = "test"; 17 | data.email = "a@b.ie"; 18 | data.salary = 123.56F; 19 | 20 | String template = "Name: {{ d.name }}\n Email: {{ d.email }}"; 21 | String result = 
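// note: populateTemplate reflects over the fields of the supplied object to fill each
// {{ d.<field> }} placeholder, which is why these tests declare IllegalAccessException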
ViewProcessor.populateTemplate(data, "d", template); 22 | assertEquals( "Name: test\n Email: a@b.ie" , result); 23 | } 24 | 25 | @Test 26 | public void templateShouldDuplicatedWhenListDataIsPassed() throws IllegalAccessException { 27 | Data data1 = new Data(); 28 | data1.name = "test"; 29 | data1.email = "a@b.ie"; 30 | data1.salary = 123.56F; 31 | 32 | Data data2 = new Data(); 33 | data2.email = "t@d.ie"; 34 | data2.salary = 13.4F; 35 | 36 | List listData = new ArrayList<>(); 37 | listData.add(data1); 38 | listData.add(data2); 39 | 40 | String template = "Name: {{ ctxt.name }}\n Email: {{ ctxt.email }}\n Salary: {{ ctxt.salary }}"; 41 | String expected = "Name: test\n Email: a@b.ie\n Salary: 123.56\nName: null\n Email: t@d.ie\n Salary: 13.4\n"; 42 | 43 | String result = ViewProcessor.populateTemplate(listData, "ctxt", template); 44 | assertEquals( expected , result); 45 | } 46 | 47 | 48 | static class Data { 49 | int id; 50 | String name; 51 | String email; 52 | float salary; 53 | } 54 | } -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/reporting/utils/FormatterUtilsTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.reporting.utils; 2 | 3 | import org.junit.Test; 4 | 5 | import static org.hamcrest.core.Is.is; 6 | import static org.junit.Assert.*; 7 | 8 | public class FormatterUtilsTest { 9 | 10 | @Test 11 | public void givenANegativeRequiredThreshold_thenNAStringShouldBeReturned() { 12 | assertThat(FormatterUtils.format(-1), is("N/A")); 13 | assertThat(FormatterUtils.format(-55), is("N/A")); 14 | } 15 | 16 | @Test 17 | public void givenANonNegativeRequiredThreshold_thenStringShouldBeReturned() { 18 | assertThat(FormatterUtils.format(1.909871f), is("1.909871")); 19 | assertThat(FormatterUtils.format(0.98799f), is("0.98799")); 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/statements/ExceptionsRegistryTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | import org.junit.After; 4 | import org.junit.AssumptionViolatedException; 5 | import org.junit.Before; 6 | import org.junit.Test; 7 | 8 | import static org.junit.Assert.assertEquals; 9 | import static org.junit.Assert.assertFalse; 10 | import static org.junit.Assert.assertTrue; 11 | import static org.junit.Assert.fail; 12 | 13 | public class ExceptionsRegistryTest { 14 | 15 | @Before 16 | public void setup() { 17 | ExceptionsRegistry.registerIgnorable(InterruptedException.class); 18 | ExceptionsRegistry.registerAbort(AssumptionViolatedException.class); 19 | } 20 | 21 | @After 22 | public void teardown() { 23 | ExceptionsRegistry.clearRegistry(); 24 | } 25 | 26 | @Test 27 | public void testRegistry() { 28 | assertEquals(1, ExceptionsRegistry.ignorables().size()); 29 | assertEquals(1, ExceptionsRegistry.abortables().size()); 30 | assertTrue(ExceptionsRegistry.ignorables().contains(InterruptedException.class)); 31 | assertTrue(ExceptionsRegistry.abortables().contains(AssumptionViolatedException.class)); 32 | } 33 | 34 | @Test 35 | public void ifIgnoreExceptionIsRegistered_thenTestingForIgnoreExceptionShouldReturnTrue() { 36 | assertTrue(ExceptionsRegistry.isIgnorable(new InterruptedException())); 37 | } 38 | 39 | @Test 40 | public void 
ifIgnoreExceptionIsNotRegistered_thenTestingForIgnoreExceptionShouldReturnFalse() { 41 | assertFalse(ExceptionsRegistry.isIgnorable(new IllegalStateException())); 42 | } 43 | 44 | @Test 45 | public void ifAbortExceptionIsRegistered_thenTestingForAbortExceptionShouldRethrowException() { 46 | AssumptionViolatedException abort = new AssumptionViolatedException("unittest"); 47 | try { 48 | ExceptionsRegistry.reThrowIfAbort(abort); 49 | fail("Expected exception to be re-thrown"); 50 | } catch (AssumptionViolatedException e) { 51 | // expected 52 | } catch (Throwable t) { 53 | fail("Unexpected exception thrown"); 54 | } 55 | } 56 | 57 | @Test 58 | public void ifAbortExceptionIsNotRegistered_thenTestingForAbortExceptionShouldNotRethrowException() { 59 | IllegalStateException exception = new IllegalStateException("unittest"); 60 | try { 61 | ExceptionsRegistry.reThrowIfAbort(exception); 62 | } catch (Throwable t) { 63 | fail("Unexpected exception thrown"); 64 | } 65 | } 66 | } -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/statements/PerformanceEvaluationStatementTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | import com.github.noconnor.junitperf.BaseTest; 4 | import com.github.noconnor.junitperf.data.EvaluationContext; 5 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 6 | import com.google.common.collect.ImmutableMap; 7 | import org.junit.AssumptionViolatedException; 8 | import org.junit.Before; 9 | import org.junit.Rule; 10 | import org.junit.Test; 11 | import org.junit.rules.ExpectedException; 12 | import org.mockito.ArgumentCaptor; 13 | import org.mockito.Mock; 14 | 15 | import java.util.concurrent.ThreadFactory; 16 | import java.util.function.Consumer; 17 | 18 | import static java.lang.System.currentTimeMillis; 19 | import static java.util.Collections.emptyMap; 20 | import static org.hamcrest.MatcherAssert.assertThat; 21 | import static org.hamcrest.Matchers.greaterThan; 22 | import static org.hamcrest.Matchers.is; 23 | import static org.hamcrest.Matchers.lessThan; 24 | import static org.hamcrest.Matchers.startsWith; 25 | import static org.junit.Assert.fail; 26 | import static org.mockito.ArgumentMatchers.any; 27 | import static org.mockito.ArgumentMatchers.anyLong; 28 | import static org.mockito.Mockito.doAnswer; 29 | import static org.mockito.Mockito.doThrow; 30 | import static org.mockito.Mockito.never; 31 | import static org.mockito.Mockito.times; 32 | import static org.mockito.Mockito.verify; 33 | import static org.mockito.Mockito.when; 34 | 35 | public class PerformanceEvaluationStatementTest extends BaseTest { 36 | 37 | @Rule 38 | public ExpectedException exception = ExpectedException.none(); 39 | 40 | @Mock 41 | private TestStatement baseStatementMock; 42 | 43 | @Mock 44 | private ThreadFactory threadFactoryMock; 45 | 46 | @Mock 47 | private EvaluationContext contextMock; 48 | 49 | @Mock 50 | private Thread threadMock; 51 | 52 | @Mock 53 | private Consumer listenerMock; 54 | 55 | private PerformanceEvaluationStatement statement; 56 | 57 | @Mock 58 | private StatisticsCalculator statisticsCalculatorMock; 59 | 60 | @Before 61 | public void setup() { 62 | ExceptionsRegistry.clearRegistry(); 63 | initialiseThreadFactoryMock(); 64 | initialiseContext(); 65 | statement = PerformanceEvaluationStatement.builder() 66 | .baseStatement(baseStatementMock) 67 | 
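// all collaborators are mocked; the stubbed thread factory below hands back a mock
// thread, so evaluations in these tests never spawn real worker threads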
.statistics(statisticsCalculatorMock)
68 | .threadFactory(threadFactoryMock)
69 | .context(contextMock)
70 | .listener(listenerMock)
71 | .build();
72 | }
73 | 
74 | @Test
75 | public void whenEvaluatingABaseStatement_thenTheCorrectNumberOfThreadsShouldBeStarted() throws Throwable {
76 | when(contextMock.getConfiguredThreads()).thenReturn(10);
77 | statement.runParallelEvaluation();
78 | verify(threadFactoryMock, times(10)).newThread(any(Runnable.class));
79 | verify(threadMock, times(10)).start();
80 | }
81 | 
82 | @Test
83 | public void whenEvaluatingABaseStatement_thenTheTestShouldEndWhenTheTestDurationExpires() throws Throwable {
84 | when(contextMock.getConfiguredDuration()).thenReturn(100);
85 | long startTimeMs = currentTimeMillis();
86 | statement.runParallelEvaluation();
87 | assertThat((currentTimeMillis() - startTimeMs), is(greaterThan(95L)));
88 | assertThat((currentTimeMillis() - startTimeMs), is(lessThan(3 * 100L)));
89 | verify(threadMock, times(1)).interrupt();
90 | }
91 | 
92 | @Test
93 | public void whenEvaluationCompletes_thenTheContextShouldBeUpdatedWithStatistics() throws Throwable {
94 | statement.runParallelEvaluation();
95 | verify(contextMock).setStatistics(any(StatisticsCalculator.class));
96 | }
97 | 
98 | @Test
99 | public void whenEvaluationCompletes_thenTheContextShouldBeUpdatedWithFinishTime() throws Throwable {
100 | statement.runParallelEvaluation();
101 | verify(contextMock).setFinishTimeNs(anyLong());
102 | }
103 | 
104 | @Test
105 | public void whenEvaluationCompletes_thenTheContextValidationShouldBeTriggered() throws Throwable {
106 | statement.runParallelEvaluation();
107 | verify(contextMock).runValidation();
108 | }
109 | 
110 | @Test
111 | public void whenEvaluationCompletes_thenTheListenerShouldBeNotified() throws Throwable {
112 | statement.runParallelEvaluation();
113 | verify(listenerMock).accept(null);
114 | }
115 | 
116 | @Test
117 | public void whenEvaluationCompletes_andValidationFails_thenTheListenerShouldStillBeNotified() throws Throwable {
118 | when(contextMock.isThroughputAchieved()).thenReturn(false);
119 | try {
120 | statement.runParallelEvaluation();
121 | fail("Assertion expected during validation");
122 | } catch (Error e) {
123 | assertThat(e.getMessage(), startsWith("Test throughput threshold not achieved"));
124 | }
125 | verify(listenerMock).accept(null);
126 | }
127 | 
128 | @Test
129 | public void whenEvaluationCompletes_andThroughputValidationFails_thenAssertionShouldBeGenerated() throws Throwable {
130 | when(contextMock.isThroughputAchieved()).thenReturn(false);
131 | try {
132 | statement.runParallelEvaluation();
133 | fail("Assertion expected during validation");
134 | } catch (Error e) {
135 | assertThat(e.getMessage(), startsWith("Test throughput threshold not achieved"));
136 | }
137 | }
138 | 
139 | @Test
140 | public void whenEvaluationCompletes_andMinLatencyValidationFails_thenAssertionShouldBeGenerated() throws Throwable {
141 | when(contextMock.isMinLatencyAchieved()).thenReturn(false);
142 | try {
143 | statement.runParallelEvaluation();
144 | fail("Assertion expected during validation");
145 | } catch (Error e) {
146 | assertThat(e.getMessage(), startsWith("Test min latency threshold not achieved"));
147 | }
148 | }
149 | 
150 | @Test
151 | public void whenEvaluationCompletes_andMeanLatencyValidationFails_thenAssertionShouldBeGenerated() throws Throwable {
152 | when(contextMock.isMeanLatencyAchieved()).thenReturn(false);
153 | try {
154 | statement.runParallelEvaluation();
155 | fail("Assertion expected during validation");
156 | } catch (Error e) {
157 | assertThat(e.getMessage(), startsWith("Test mean latency threshold not achieved"));
158 | }
159 | }
160 | 
161 | @Test
162 | public void whenEvaluationCompletes_andMaxLatencyValidationFails_thenAssertionShouldBeGenerated() throws Throwable {
163 | when(contextMock.isMaxLatencyAchieved()).thenReturn(false);
164 | try {
165 | statement.runParallelEvaluation();
166 | fail("Assertion expected during validation");
167 | } catch (Error e) {
168 | assertThat(e.getMessage(), startsWith("Test max latency threshold not achieved"));
169 | }
170 | }
171 | 
172 | @Test
173 | public void whenEvaluationCompletes_andErrorValidationFails_thenAssertionShouldBeGenerated() throws Throwable {
174 | when(contextMock.isErrorThresholdAchieved()).thenReturn(false);
175 | try {
176 | statement.runParallelEvaluation();
177 | fail("Assertion expected during validation");
178 | } catch (Error e) {
179 | assertThat(e.getMessage(), startsWith("Error threshold not achieved"));
180 | }
181 | }
182 | 
183 | @Test
184 | public void whenEvaluationCompletes_andPercentileLatencyValidationFails_thenAssertionShouldBeGenerated() throws Throwable {
185 | when(contextMock.getPercentileResults()).thenReturn(ImmutableMap.of(90, true, 95, false));
186 | try {
187 | statement.runParallelEvaluation();
188 | fail("Assertion expected during validation");
189 | } catch (Error e) {
190 | assertThat(e.getMessage(), startsWith("95th Percentile has not achieved required threshold"));
191 | }
192 | }
193 | 
194 | @Test
195 | public void whenCreatingEvaluationTasks_thenIsAsyncEvaluationShouldBeChecked() throws Throwable {
196 | statement.runParallelEvaluation();
197 | verify(contextMock).isAsyncEvaluation();
198 | }
199 | 
200 | @Test
201 | public void whenCreatingEvaluationTasks_andIsAsyncEvaluationIsTrue_thenTaskThreadShouldBeCreated() throws Throwable {
202 | when(contextMock.isAsyncEvaluation()).thenReturn(true);
203 | statement.runParallelEvaluation();
204 | verify(threadFactoryMock).newThread(any(Runnable.class));
205 | }
206 | 
207 | @Test
208 | public void whenRunningEvaluation_thenStatisticsShouldBeReset() throws Throwable {
209 | statement.runParallelEvaluation();
210 | statement.runParallelEvaluation();
211 | statement.runParallelEvaluation();
212 | verify(statisticsCalculatorMock, times(3)).reset();
213 | }
214 | 
215 | @Test
216 | public void whenBaseStatementThrowsAnAbortException_thenExceptionShouldBeReThrown() throws Throwable {
217 | 
218 | ExceptionsRegistry.registerAbort(AssumptionViolatedException.class);
219 | 
220 | ArgumentCaptor<Runnable> captor = ArgumentCaptor.forClass(Runnable.class);
221 | when(threadFactoryMock.newThread(captor.capture())).thenReturn(threadMock);
222 | doAnswer((invocation) -> {
223 | captor.getValue().run();
224 | return null;
225 | }).when(threadMock).start();
226 | 
227 | AssumptionViolatedException abort = new AssumptionViolatedException("unittest");
228 | doThrow(abort).when(baseStatementMock).evaluate();
229 | when(contextMock.isAborted()).thenReturn(true);
230 | when(contextMock.getAbortedException()).thenReturn(abort);
231 | 
232 | try {
233 | statement.runParallelEvaluation();
234 | } catch (AssumptionViolatedException e) {
235 | // expected
236 | } catch (Throwable t) {
237 | fail("Unexpected exception");
238 | } finally {
239 | verify(contextMock).setAbortedException(abort);
240 | verify(listenerMock).accept(null);
241 | verify(contextMock, never()).runValidation();
242 | }
243 | }
244 | 
245 | private void initialiseThreadFactoryMock() {
246 | 
when(threadFactoryMock.newThread(any(Runnable.class))).thenReturn(threadMock); 247 | } 248 | 249 | private void initialiseContext() { 250 | when(contextMock.getConfiguredThreads()).thenReturn(1); 251 | when(contextMock.getConfiguredDuration()).thenReturn(100); 252 | when(contextMock.isErrorThresholdAchieved()).thenReturn(true); 253 | when(contextMock.isThroughputAchieved()).thenReturn(true); 254 | when(contextMock.isMaxLatencyAchieved()).thenReturn(true); 255 | when(contextMock.isMinLatencyAchieved()).thenReturn(true); 256 | when(contextMock.isMeanLatencyAchieved()).thenReturn(true); 257 | when(contextMock.getPercentileResults()).thenReturn(emptyMap()); 258 | } 259 | 260 | } 261 | -------------------------------------------------------------------------------- /junitperf-core/src/test/java/com/github/noconnor/junitperf/statistics/providers/DescriptiveStatisticsCalculatorTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statistics.providers; 2 | 3 | import com.github.noconnor.junitperf.BaseTest; 4 | import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; 5 | import org.junit.Before; 6 | import org.junit.Test; 7 | import org.mockito.Spy; 8 | 9 | import static java.util.concurrent.TimeUnit.MILLISECONDS; 10 | import static java.util.concurrent.TimeUnit.NANOSECONDS; 11 | import static org.hamcrest.Matchers.is; 12 | import static org.junit.Assert.assertEquals; 13 | import static org.junit.Assert.assertThat; 14 | import static org.mockito.Mockito.verify; 15 | 16 | public class DescriptiveStatisticsCalculatorTest extends BaseTest { 17 | 18 | private DescriptiveStatisticsCalculator evaluator; 19 | 20 | @Spy 21 | private DescriptiveStatistics statsMock; 22 | 23 | @Before 24 | public void setup() { 25 | evaluator = new DescriptiveStatisticsCalculator(statsMock); 26 | } 27 | 28 | @Test 29 | public void whenCallingAddLatencyMeasurement_thenStatsShouldBeUpdated() { 30 | evaluator.addLatencyMeasurement(20345L); 31 | verify(statsMock).addValue(20345L); 32 | } 33 | 34 | @Test 35 | public void whenCallingGetErrorCount_thenValidErrorCountShouldBeReturned() { 36 | evaluator.incrementErrorCount(); 37 | evaluator.incrementErrorCount(); 38 | assertThat(evaluator.getErrorCount(), is(2L)); 39 | } 40 | 41 | @Test 42 | public void whenCallingGetEvaluationCount_thenValidEvaluationCountShouldBeReturned() { 43 | evaluator.incrementEvaluationCount(); 44 | evaluator.incrementEvaluationCount(); 45 | assertThat(evaluator.getEvaluationCount(), is(2L)); 46 | } 47 | 48 | @Test 49 | public void whenCallingGetMaxLatency_thenMaxLatencyShouldBeReturned() { 50 | evaluator.addLatencyMeasurement(10); 51 | evaluator.addLatencyMeasurement(1000); 52 | assertThat(evaluator.getMaxLatency(NANOSECONDS), is(1000F)); 53 | } 54 | 55 | @Test 56 | public void whenCallingGetMaxLatency_thenMaxLatencyShouldBeAdjustedToMatchSpecifiedUnits() { 57 | evaluator.addLatencyMeasurement(10); 58 | evaluator.addLatencyMeasurement(1000); 59 | assertThat(evaluator.getMaxLatency(NANOSECONDS), is(1000F)); 60 | assertThat(evaluator.getMaxLatency(MILLISECONDS), is(0.001000F)); 61 | } 62 | 63 | @Test 64 | public void whenCallingGetMinLatency_thenMinLatencyShouldBeReturned() { 65 | evaluator.addLatencyMeasurement(10); 66 | evaluator.addLatencyMeasurement(1000); 67 | assertThat(evaluator.getMinLatency(NANOSECONDS), is(10F)); 68 | } 69 | 70 | @Test 71 | public void whenCallingGetMinLatency_thenMinLatencyShouldBeAdjustedToMatchSpecifiedUnits() { 72 | 
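// latencies are recorded in nanoseconds: the 10 ns minimum below converts to 0.00001 ms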
evaluator.addLatencyMeasurement(10); 73 | evaluator.addLatencyMeasurement(1000); 74 | assertThat(evaluator.getMinLatency(NANOSECONDS), is(10F)); 75 | assertThat(evaluator.getMinLatency(MILLISECONDS), is(0.00001F)); 76 | } 77 | 78 | @Test 79 | public void whenCallingGetMeanLatency_thenMeanLatencyShouldBeReturned() { 80 | evaluator.addLatencyMeasurement(10); 81 | evaluator.addLatencyMeasurement(1000); 82 | assertThat(evaluator.getMeanLatency(NANOSECONDS), is(505F)); 83 | } 84 | 85 | @Test 86 | public void whenCallingGetMeanLatency_thenMeanLatencyShouldBeAdjustedToMatchSpecifiedUnits() { 87 | evaluator.addLatencyMeasurement(10); 88 | evaluator.addLatencyMeasurement(1000); 89 | assertThat(evaluator.getMeanLatency(NANOSECONDS), is(505F)); 90 | assertThat(evaluator.getMeanLatency(MILLISECONDS), is(0.000505F)); 91 | } 92 | 93 | @Test 94 | public void whenCallingGetPercentile_thenValidPercentileShouldBeReturned() { 95 | evaluator.addLatencyMeasurement(20345L); 96 | assertThat(evaluator.getLatencyPercentile(99, NANOSECONDS), is(20345F)); 97 | } 98 | 99 | @Test 100 | public void whenCallingGetPercentile_thenValidPercentileShouldBeAdjustedToMatchSpecifiedUnits() { 101 | evaluator.addLatencyMeasurement(20345L); 102 | assertThat(evaluator.getLatencyPercentile(99, NANOSECONDS), is(20345F)); 103 | assertThat(evaluator.getLatencyPercentile(99, MILLISECONDS), is(0.020345F)); 104 | } 105 | 106 | @Test 107 | public void whenCallingGetPercentageError_thenPercentageErrorShouldBeCalculated() { 108 | evaluator.incrementErrorCount(); 109 | evaluator.incrementEvaluationCount(); 110 | evaluator.incrementEvaluationCount(); 111 | assertThat(evaluator.getErrorPercentage(), is(50F)); 112 | } 113 | 114 | @Test 115 | public void whenEvaluationCountIsZero_andGetPercentageErrorIsCalled_thenZeroShouldBeReturned() { 116 | assertThat(evaluator.getErrorPercentage(), is(0F)); 117 | } 118 | 119 | @Test 120 | public void whenMeanLatencyIsZero_andGetMeanLatencyIsCalled_thenZeroShouldBeReturned() { 121 | assertThat(evaluator.getMeanLatency(MILLISECONDS), is(0F)); 122 | } 123 | 124 | @Test 125 | public void whenMinLatencyIsZero_andGetMinLatencyIsCalled_thenZeroShouldBeReturned() { 126 | assertThat(evaluator.getMinLatency(MILLISECONDS), is(0F)); 127 | } 128 | 129 | @Test 130 | public void whenMaxLatencyIsZero_andGetMaxLatencyIsCalled_thenZeroShouldBeReturned() { 131 | assertThat(evaluator.getMaxLatency(MILLISECONDS), is(0F)); 132 | } 133 | 134 | @Test 135 | public void whenLatencyPercentilesAreZero_andGetLatencyPercentileIsCalled_thenZeroShouldBeReturned() { 136 | assertThat(evaluator.getLatencyPercentile(90, MILLISECONDS), is(0F)); 137 | } 138 | 139 | @Test 140 | public void whenCreatingANewDescriptiveStatisticsCalculator_thenItShouldBePossibleToSetTheWindowSize() { 141 | evaluator = new DescriptiveStatisticsCalculator(10); 142 | } 143 | 144 | @Test 145 | public void whenResettingStatsCollector_thenStatsShouldBeCleared() { 146 | evaluator.incrementErrorCount(); 147 | evaluator.incrementEvaluationCount(); 148 | assertEquals(1, evaluator.getErrorCount()); 149 | assertEquals(1, evaluator.getEvaluationCount()); 150 | evaluator.reset(); 151 | verify(statsMock).clear(); 152 | assertEquals(0, evaluator.getErrorCount()); 153 | assertEquals(0, evaluator.getEvaluationCount()); 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /junitperf-core/src/test/resources/csv/fail_abort_succeed.csv: -------------------------------------------------------------------------------- 1 | 
testName,duration,threadCount,throughput,minLatencyNs,maxLatencyNs,meanLatencyNs,1st,2nd,3rd,4th,5th,6th,7th,8th,9th,10th,11th,12th,13th,14th,15th,16th,17th,18th,19th,20th,21st,22nd,23rd,24th,25th,26th,27th,28th,29th,30th,31st,32nd,33rd,34th,35th,36th,37th,38th,39th,40th,41st,42nd,43rd,44th,45th,46th,47th,48th,49th,50th,51st,52nd,53rd,54th,55th,56th,57th,58th,59th,60th,61st,62nd,63rd,64th,65th,66th,67th,68th,69th,70th,71st,72nd,73rd,74th,75th,76th,77th,78th,79th,80th,81st,82nd,83rd,84th,85th,86th,87th,88th,89th,90th,91st,92nd,93rd,94th,95th,96th,97th,98th,99th,100th 2 | unittest1,10000,50,101,12.7000,234.6800,61.7000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,4.6364,48.3435,234.6800 3 | unittest2 (skipped),0,50,0,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000 4 | unittest3,10000,50,13131,1.6364,38.5485,17.5400,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,1.6364,28.3435,38.5485 5 | -------------------------------------------------------------------------------- /junitperf-core/src/test/resources/csv/failed.csv: -------------------------------------------------------------------------------- 1 | 
testName,duration,threadCount,throughput,minLatencyNs,maxLatencyNs,meanLatencyNs,1st,2nd,3rd,4th,5th,6th,7th,8th,9th,10th,11th,12th,13th,14th,15th,16th,17th,18th,19th,20th,21st,22nd,23rd,24th,25th,26th,27th,28th,29th,30th,31st,32nd,33rd,34th,35th,36th,37th,38th,39th,40th,41st,42nd,43rd,44th,45th,46th,47th,48th,49th,50th,51st,52nd,53rd,54th,55th,56th,57th,58th,59th,60th,61st,62nd,63rd,64th,65th,66th,67th,68th,69th,70th,71st,72nd,73rd,74th,75th,76th,77th,78th,79th,80th,81st,82nd,83rd,84th,85th,86th,87th,88th,89th,90th,91st,92nd,93rd,94th,95th,96th,97th,98th,99th,100th 2 | unittest1,10000,50,101,12.7000,234.6800,61.7000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,4.6364,48.3435,234.6800 3 | unittest2,10000,50,101,12.7000,234.6800,61.7000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,4.6364,48.3435,234.6800 4 | -------------------------------------------------------------------------------- /junitperf-core/src/test/resources/csv/mix.csv: -------------------------------------------------------------------------------- 1 | testName,duration,threadCount,throughput,minLatencyNs,maxLatencyNs,meanLatencyNs,1st,2nd,3rd,4th,5th,6th,7th,8th,9th,10th,11th,12th,13th,14th,15th,16th,17th,18th,19th,20th,21st,22nd,23rd,24th,25th,26th,27th,28th,29th,30th,31st,32nd,33rd,34th,35th,36th,37th,38th,39th,40th,41st,42nd,43rd,44th,45th,46th,47th,48th,49th,50th,51st,52nd,53rd,54th,55th,56th,57th,58th,59th,60th,61st,62nd,63rd,64th,65th,66th,67th,68th,69th,70th,71st,72nd,73rd,74th,75th,76th,77th,78th,79th,80th,81st,82nd,83rd,84th,85th,86th,87th,88th,89th,90th,91st,92nd,93rd,94th,95th,96th,97th,98th,99th,100th 2 | 
unittest1,10000,50,101,12.7000,234.6800,61.7000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,4.6364,48.3435,234.6800 3 | unittest2,10000,50,13131,1.6364,38.5485,17.5400,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,1.6364,28.3435,38.5485 4 | -------------------------------------------------------------------------------- /junitperf-core/src/test/resources/csv/passed.csv: -------------------------------------------------------------------------------- 1 | testName,duration,threadCount,throughput,minLatencyNs,maxLatencyNs,meanLatencyNs,1st,2nd,3rd,4th,5th,6th,7th,8th,9th,10th,11th,12th,13th,14th,15th,16th,17th,18th,19th,20th,21st,22nd,23rd,24th,25th,26th,27th,28th,29th,30th,31st,32nd,33rd,34th,35th,36th,37th,38th,39th,40th,41st,42nd,43rd,44th,45th,46th,47th,48th,49th,50th,51st,52nd,53rd,54th,55th,56th,57th,58th,59th,60th,61st,62nd,63rd,64th,65th,66th,67th,68th,69th,70th,71st,72nd,73rd,74th,75th,76th,77th,78th,79th,80th,81st,82nd,83rd,84th,85th,86th,87th,88th,89th,90th,91st,92nd,93rd,94th,95th,96th,97th,98th,99th,100th 2 | unittest1,10000,50,13131,1.6364,38.5485,17.5400,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,1.6364,28.3435,38.5485 3 | 
unittest2,10000,50,13131,1.6364,38.5485,17.5400,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,1.6364,28.3435,38.5485 4 | -------------------------------------------------------------------------------- /junitperf-core/src/test/resources/csv/some_failures.csv: -------------------------------------------------------------------------------- 1 | testName,duration,threadCount,throughput,minLatencyNs,maxLatencyNs,meanLatencyNs,1st,2nd,3rd,4th,5th,6th,7th,8th,9th,10th,11th,12th,13th,14th,15th,16th,17th,18th,19th,20th,21st,22nd,23rd,24th,25th,26th,27th,28th,29th,30th,31st,32nd,33rd,34th,35th,36th,37th,38th,39th,40th,41st,42nd,43rd,44th,45th,46th,47th,48th,49th,50th,51st,52nd,53rd,54th,55th,56th,57th,58th,59th,60th,61st,62nd,63rd,64th,65th,66th,67th,68th,69th,70th,71st,72nd,73rd,74th,75th,76th,77th,78th,79th,80th,81st,82nd,83rd,84th,85th,86th,87th,88th,89th,90th,91st,92nd,93rd,94th,95th,96th,97th,98th,99th,100th 2 | unittest1,10000,50,13131,17.5400,38.5485,28.3435,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,0.0000,1.6364,28.3435,28.3435 3 | -------------------------------------------------------------------------------- /junitperf-junit4/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | junitperf-parent 6 | com.github.noconnor 7 | 1.37.0-SNAPSHOT 8 | 9 | 4.0.0 10 | junitperf 11 | 12 | 13 | 4.13.2 14 | 15 | 16 | 17 | 18 | com.github.noconnor 19 | junitperf-core 20 | 1.37.0-SNAPSHOT 21 | 22 | 23 | junit 24 | junit 25 | ${junit.version} 26 | 27 | 28 | 29 | 30 | 31 | 32 | org.jacoco 33 | jacoco-maven-plugin 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /junitperf-junit4/src/main/java/com/github/noconnor/junitperf/JUnitPerfAsyncRule.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf; 2 | 3 | import com.github.noconnor.junitperf.data.EvaluationContext; 4 | import com.github.noconnor.junitperf.data.NoOpTestContext; 5 | import com.github.noconnor.junitperf.data.TestContext; 6 | import com.github.noconnor.junitperf.reporting.ReportGenerator; 7 | import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator; 8 | import 
com.github.noconnor.junitperf.statistics.StatisticsCalculator; 9 | import com.github.noconnor.junitperf.statistics.providers.DescriptiveStatisticsCalculator; 10 | import org.junit.runner.Description; 11 | import org.junit.runners.model.Statement; 12 | 13 | import static java.lang.System.currentTimeMillis; 14 | import static java.lang.System.nanoTime; 15 | import static java.util.Objects.nonNull; 16 | 17 | @SuppressWarnings("WeakerAccess") 18 | public class JUnitPerfAsyncRule extends JUnitPerfRule { 19 | 20 | private long measurementsStartTimeMs; 21 | 22 | public JUnitPerfAsyncRule() { 23 | this(new DescriptiveStatisticsCalculator(), new HtmlReportGenerator()); 24 | } 25 | 26 | public JUnitPerfAsyncRule(ReportGenerator... reportGenerator) { 27 | this(new DescriptiveStatisticsCalculator(), reportGenerator); 28 | } 29 | 30 | public JUnitPerfAsyncRule(StatisticsCalculator statisticsCalculator) { 31 | this(statisticsCalculator, new HtmlReportGenerator()); 32 | } 33 | 34 | public JUnitPerfAsyncRule(StatisticsCalculator statisticsCalculator, ReportGenerator... reportGenerator) { 35 | super(statisticsCalculator, reportGenerator); 36 | } 37 | 38 | public TestContext newContext() { 39 | return hasMeasurementPeriodStarted() ? new TestContext(statisticsCalculator) : NoOpTestContext.INSTANCE; 40 | } 41 | 42 | @Override 43 | public Statement apply(Statement base, Description description) { 44 | setMeasurementsStartTime(description.getAnnotation(JUnitPerfTest.class)); 45 | return super.apply(base, description); 46 | } 47 | 48 | @Override 49 | EvaluationContext createEvaluationContext(Description description) { 50 | return new EvaluationContext(description.getMethodName(), nanoTime(), true); 51 | } 52 | 53 | private void setMeasurementsStartTime(JUnitPerfTest perfTestAnnotation) { 54 | if (nonNull(perfTestAnnotation)) { 55 | measurementsStartTimeMs = currentTimeMillis() + perfTestAnnotation.warmUpMs(); 56 | } 57 | } 58 | 59 | private boolean hasMeasurementPeriodStarted() { 60 | return currentTimeMillis() >= measurementsStartTimeMs; 61 | } 62 | 63 | } 64 | -------------------------------------------------------------------------------- /junitperf-junit4/src/main/java/com/github/noconnor/junitperf/JUnitPerfRule.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf; 2 | 3 | import static java.lang.System.nanoTime; 4 | import static java.util.Objects.nonNull; 5 | import static java.util.stream.Collectors.toSet; 6 | 7 | import com.github.noconnor.junitperf.data.EvaluationContext; 8 | import com.github.noconnor.junitperf.reporting.ReportGenerator; 9 | import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator; 10 | import com.github.noconnor.junitperf.statements.DefaultStatement; 11 | import com.github.noconnor.junitperf.statements.ExceptionsRegistry; 12 | import com.github.noconnor.junitperf.statements.MeasurableStatement; 13 | import com.github.noconnor.junitperf.statements.PerformanceEvaluationStatement; 14 | import com.github.noconnor.junitperf.statements.PerformanceEvaluationStatement.PerformanceEvaluationStatementBuilder; 15 | import com.github.noconnor.junitperf.statements.TestStatement; 16 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 17 | import com.github.noconnor.junitperf.statistics.providers.DescriptiveStatisticsCalculator; 18 | import java.util.Arrays; 19 | import java.util.HashMap; 20 | import java.util.LinkedHashSet; 21 | import java.util.Map; 22 | import java.util.Set; 23 | 24 
| import org.junit.AssumptionViolatedException;
25 | import org.junit.rules.TestRule;
26 | import org.junit.runner.Description;
27 | import org.junit.runners.model.Statement;
28 | 
29 | @SuppressWarnings("WeakerAccess")
30 | public class JUnitPerfRule implements TestRule {
31 | 
32 | static final Map<Class<?>, LinkedHashSet<EvaluationContext>> ACTIVE_CONTEXTS = new HashMap<>();
33 | 
34 | static {
35 | ExceptionsRegistry.registerIgnorable(InterruptedException.class);
36 | ExceptionsRegistry.registerAbort(AssumptionViolatedException.class);
37 | }
38 | 
39 | private final Set<ReportGenerator> reporters;
40 | 
41 | StatisticsCalculator statisticsCalculator;
42 | PerformanceEvaluationStatementBuilder perEvalBuilder;
43 | boolean excludeBeforeAndAfters;
44 | 
45 | public JUnitPerfRule() {
46 | this(false);
47 | }
48 | 
49 | public JUnitPerfRule(boolean excludeBeforeAndAfters) {
50 | this(excludeBeforeAndAfters, new DescriptiveStatisticsCalculator(), new HtmlReportGenerator());
51 | }
52 | 
53 | public JUnitPerfRule(ReportGenerator... reportGenerator) {
54 | this(false, reportGenerator);
55 | }
56 | 
57 | public JUnitPerfRule(boolean excludeBeforeAndAfters, ReportGenerator... reportGenerator) {
58 | this(excludeBeforeAndAfters, new DescriptiveStatisticsCalculator(), reportGenerator);
59 | }
60 | 
61 | public JUnitPerfRule(StatisticsCalculator statisticsCalculator) {
62 | this(false, statisticsCalculator);
63 | }
64 | 
65 | public JUnitPerfRule(boolean excludeBeforeAndAfters, StatisticsCalculator statisticsCalculator) {
66 | this(excludeBeforeAndAfters, statisticsCalculator, new HtmlReportGenerator());
67 | }
68 | 
69 | public JUnitPerfRule(StatisticsCalculator statisticsCalculator, ReportGenerator... reportGenerator) {
70 | this(false, statisticsCalculator, reportGenerator);
71 | }
72 | 
73 | public JUnitPerfRule(boolean excludeBeforeAndAfters, StatisticsCalculator statisticsCalculator, ReportGenerator... reportGenerator) {
74 | this.perEvalBuilder = PerformanceEvaluationStatement.builder();
75 | this.statisticsCalculator = statisticsCalculator;
76 | this.reporters = Arrays.stream(reportGenerator).collect(toSet());
77 | this.excludeBeforeAndAfters = excludeBeforeAndAfters;
78 | }
79 | 
80 | @Override
81 | public Statement apply(Statement base, Description description) {
82 | Statement activeStatement = base;
83 | 
84 | JUnitPerfTest perfTestAnnotation = description.getAnnotation(JUnitPerfTest.class);
85 | JUnitPerfTestRequirement requirementsAnnotation = description.getAnnotation(JUnitPerfTestRequirement.class);
86 | 
87 | if (nonNull(perfTestAnnotation)) {
88 | EvaluationContext context = createEvaluationContext(description);
89 | context.loadConfiguration(perfTestAnnotation);
90 | context.loadRequirements(requirementsAnnotation);
91 | 
92 | // Group test contexts by test class
93 | ACTIVE_CONTEXTS.putIfAbsent(description.getTestClass(), new LinkedHashSet<>());
94 | ACTIVE_CONTEXTS.get(description.getTestClass()).add(context);
95 | 
96 | TestStatement testStatement = excludeBeforeAndAfters ?
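// MeasurableStatement decomposes the JUnit4 statement chain so that @Before/@After
// methods run outside the measured window; DefaultStatement times the chain as-is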
new MeasurableStatement(base) : new DefaultStatement(base);
97 | 
98 | PerformanceEvaluationStatement parallelExecution = perEvalBuilder.baseStatement(testStatement)
99 | .statistics(statisticsCalculator)
100 | .context(context)
101 | .listener(complete -> updateReport(description.getTestClass()))
102 | .build();
103 | 
104 | activeStatement = new Statement() {
105 | @Override
106 | public void evaluate() throws Throwable {
107 | parallelExecution.runParallelEvaluation();
108 | }
109 | };
110 | }
111 | return activeStatement;
112 | }
113 | 
114 | EvaluationContext createEvaluationContext(Description description) {
115 | return new EvaluationContext(description.getMethodName(), nanoTime());
116 | }
117 | 
118 | private synchronized void updateReport(Class<?> testClass) {
119 | reporters.forEach(r -> {
120 | r.generateReport(ACTIVE_CONTEXTS.get(testClass));
121 | });
122 | }
123 | 
124 | }
125 | 
-------------------------------------------------------------------------------- /junitperf-junit4/src/main/java/com/github/noconnor/junitperf/statements/DefaultStatement.java: --------------------------------------------------------------------------------
1 | package com.github.noconnor.junitperf.statements;
2 | 
3 | import org.junit.runners.model.Statement;
4 | 
5 | public class DefaultStatement implements TestStatement {
6 | 
7 | private final Statement statement;
8 | 
9 | public DefaultStatement(Statement statement) {
10 | this.statement = statement;
11 | }
12 | 
13 | @Override
14 | public void runBefores() {
15 | // do nothing
16 | }
17 | 
18 | @Override
19 | public void evaluate() throws Throwable {
20 | // Underlying statement will run before and after and all latencies/stats will be measured
21 | statement.evaluate();
22 | }
23 | 
24 | @Override
25 | public void runAfters() {
26 | // do nothing
27 | }
28 | }
29 | 
-------------------------------------------------------------------------------- /junitperf-junit4/src/main/java/com/github/noconnor/junitperf/statements/EmptyStatement.java: --------------------------------------------------------------------------------
1 | package com.github.noconnor.junitperf.statements;
2 | 
3 | import org.junit.runners.model.Statement;
4 | 
5 | class EmptyStatement extends Statement {
6 | 
7 | @Override
8 | public void evaluate() throws Throwable {
9 | // do nothing
10 | }
11 | }
12 | 
-------------------------------------------------------------------------------- /junitperf-junit4/src/main/java/com/github/noconnor/junitperf/statements/MeasurableStatement.java: --------------------------------------------------------------------------------
1 | package com.github.noconnor.junitperf.statements;
2 | 
3 | import static java.util.Objects.nonNull;
4 | 
5 | import java.lang.reflect.Field;
6 | import java.util.List;
7 | import org.junit.internal.runners.statements.RunAfters;
8 | import org.junit.internal.runners.statements.RunBefores;
9 | import org.junit.runners.model.FrameworkMethod;
10 | import org.junit.runners.model.Statement;
11 | 
12 | public class MeasurableStatement implements TestStatement {
13 | 
14 | private RunBefores befores;
15 | private Statement statement;
16 | private RunAfters afters;
17 | 
18 | public MeasurableStatement(Statement statement) {
19 | if (statement instanceof RunAfters) {
20 | decompose((RunAfters) statement);
21 | } else if (statement instanceof RunBefores) {
22 | decompose((RunBefores) statement);
23 | } else {
24 | this.befores = null;
25 | this.statement = statement;
26 | this.afters = null;
27 | }
28 | }
29 | 
30 | @Override
31 | public void runBefores() throws Throwable {
32 | if (nonNull(befores)) {
33 | befores.evaluate();
34 | }
35 | }
36 | 
37 | @Override
38 | public void evaluate() throws Throwable {
39 | statement.evaluate();
40 | }
41 | 
42 | @Override
43 | public void runAfters() throws Throwable {
44 | if (nonNull(afters)) {
45 | afters.evaluate();
46 | }
47 | }
48 | 
49 | private void decompose(RunBefores befores) {
50 | try {
51 | Statement statement = captureStatement(befores);
52 | this.befores = decomposeBefores(befores);
53 | if (statement instanceof RunAfters) {
54 | this.afters = decomposeAfters((RunAfters) statement);
55 | this.statement = captureStatement((RunAfters) statement);
56 | } else {
57 | this.afters = null;
58 | this.statement = statement;
59 | }
60 | } catch (Exception e) {
61 | throw new IllegalStateException(e);
62 | }
63 | }
64 | 
65 | private void decompose(RunAfters afters) {
66 | try {
67 | Statement statement = captureStatement(afters);
68 | this.afters = decomposeAfters(afters);
69 | if (statement instanceof RunBefores) {
70 | this.befores = decomposeBefores((RunBefores) statement);
71 | this.statement = captureStatement((RunBefores) statement);
72 | } else {
73 | this.befores = null;
74 | this.statement = statement;
75 | }
76 | 
77 | } catch (Exception e) {
78 | throw new IllegalStateException(e);
79 | }
80 | }
81 | 
82 | @SuppressWarnings("unchecked")
83 | private RunBefores decomposeBefores(RunBefores befores) throws NoSuchFieldException, IllegalAccessException {
84 | Field beforesField = RunBefores.class.getDeclaredField("befores");
85 | Field targetField = RunBefores.class.getDeclaredField("target");
86 | beforesField.setAccessible(true);
87 | targetField.setAccessible(true);
88 | return new RunBefores(
89 | new EmptyStatement(),
90 | (List<FrameworkMethod>) beforesField.get(befores),
91 | targetField.get(befores)
92 | );
93 | }
94 | 
95 | @SuppressWarnings("unchecked")
96 | private RunAfters decomposeAfters(RunAfters afters) throws NoSuchFieldException, IllegalAccessException {
97 | Field aftersField = RunAfters.class.getDeclaredField("afters");
98 | Field targetField = RunAfters.class.getDeclaredField("target");
99 | aftersField.setAccessible(true);
100 | targetField.setAccessible(true);
101 | return new RunAfters(
102 | new EmptyStatement(),
103 | (List<FrameworkMethod>) aftersField.get(afters),
104 | targetField.get(afters)
105 | );
106 | }
107 | 
108 | private Statement captureStatement(RunAfters afters) throws NoSuchFieldException, IllegalAccessException {
109 | Field nextField = RunAfters.class.getDeclaredField("next");
110 | nextField.setAccessible(true);
111 | return (Statement) nextField.get(afters);
112 | }
113 | 
114 | private Statement captureStatement(RunBefores befores) throws NoSuchFieldException, IllegalAccessException {
115 | Field nextField = RunBefores.class.getDeclaredField("next");
116 | nextField.setAccessible(true);
117 | return (Statement) nextField.get(befores);
118 | }
119 | 
120 | }
121 | 
-------------------------------------------------------------------------------- /junitperf-junit4/src/test/java/com/github/noconnor/junitperf/JUnitPerfAsyncRuleTest.java: --------------------------------------------------------------------------------
1 | package com.github.noconnor.junitperf;
2 | 
3 | import org.junit.Before;
4 | import org.junit.Test;
5 | import org.junit.runner.Description;
6 | import org.junit.runner.RunWith;
7 | import org.mockito.Mock;
8 | import org.mockito.MockitoAnnotations;
9 | import org.mockito.junit.MockitoJUnitRunner;
10 | import com.github.noconnor.junitperf.data.EvaluationContext;
11 | import com.github.noconnor.junitperf.data.NoOpTestContext; 12 | import com.github.noconnor.junitperf.data.TestContext; 13 | import com.github.noconnor.junitperf.reporting.ReportGenerator; 14 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator; 15 | 16 | import static org.hamcrest.CoreMatchers.is; 17 | import static org.hamcrest.junit.MatcherAssert.assertThat; 18 | import static org.mockito.Mockito.verify; 19 | import static org.mockito.Mockito.when; 20 | 21 | @RunWith(MockitoJUnitRunner.StrictStubs.class) 22 | public class JUnitPerfAsyncRuleTest { 23 | 24 | @Mock 25 | private StatisticsCalculator statsCollectorMock; 26 | 27 | @Mock 28 | private ReportGenerator reporterMock; 29 | 30 | @Mock 31 | private Description descriptionMock; 32 | 33 | @Mock 34 | private JUnitPerfTest perfTestAnnotationMock; 35 | 36 | private JUnitPerfAsyncRule rule; 37 | 38 | public JUnitPerfAsyncRuleTest() { 39 | MockitoAnnotations.initMocks(this); 40 | } 41 | 42 | @Before 43 | public void setup() { 44 | initialisePerfTestAnnotationMock(); 45 | rule = new JUnitPerfAsyncRule(statsCollectorMock, reporterMock); 46 | } 47 | 48 | @Test 49 | public void whenCallingNewTestContext_thenATestContextIsReturnedThatWrapsTheStatsCollector() { 50 | TestContext context = rule.newContext(); 51 | context.success(); 52 | verify(statsCollectorMock).incrementEvaluationCount(); 53 | } 54 | 55 | @Test 56 | public void whenCallingNewTestContext_andApplyHasBeenCalled_andWarmupPeriodHasNotElapsed_thenANoOpTestContextShouldBeReturned() { 57 | mimicWarmUpPeriod(1_000); 58 | rule.apply(null, descriptionMock); 59 | TestContext context = rule.newContext(); 60 | assertThat(context, is(NoOpTestContext.INSTANCE)); 61 | } 62 | 63 | @Test 64 | public void whenCallingNewTestContext_andApplyHasBeenCalled_andWarmupPeriodHasElapsed_thenAValidTestContextShouldBeReturned() { 65 | mimicWarmUpPeriod(0); 66 | rule.apply(null, descriptionMock); 67 | TestContext context = rule.newContext(); 68 | context.success(); 69 | verify(statsCollectorMock).incrementEvaluationCount(); 70 | } 71 | 72 | @Test 73 | public void whenCallingCreateEvaluationContext_thenContextShouldHaveAsyncFlagSetToTrue() { 74 | EvaluationContext context = rule.createEvaluationContext(descriptionMock); 75 | assertThat(context.isAsyncEvaluation(), is(true)); 76 | } 77 | 78 | @Test 79 | public void whenCallingApply_andNoPerfTestAnnotationIsPresent_thenNoExceptionShouldBeThrown() { 80 | when(descriptionMock.getAnnotation(JUnitPerfTest.class)).thenReturn(null); 81 | rule.apply(null, descriptionMock); 82 | } 83 | 84 | private void mimicWarmUpPeriod(int periodMs) { 85 | when(perfTestAnnotationMock.warmUpMs()).thenReturn(periodMs); 86 | } 87 | 88 | private void initialisePerfTestAnnotationMock() { 89 | when(descriptionMock.getAnnotation(JUnitPerfTest.class)).thenReturn(perfTestAnnotationMock); 90 | when(perfTestAnnotationMock.warmUpMs()).thenReturn(0); 91 | when(perfTestAnnotationMock.durationMs()).thenReturn(10_000); 92 | when(perfTestAnnotationMock.threads()).thenReturn(1); 93 | when(perfTestAnnotationMock.maxExecutionsPerSecond()).thenReturn(-1); 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /junitperf-junit4/src/test/java/com/github/noconnor/junitperf/statements/DefaultStatementTest.java: -------------------------------------------------------------------------------- 1 | package com.github.noconnor.junitperf.statements; 2 | 3 | import static org.mockito.Mockito.verify; 4 | import static 
org.mockito.Mockito.verifyNoInteractions;
5 | 
6 | import junit.framework.TestCase;
7 | import org.junit.Test;
8 | import org.junit.runner.RunWith;
9 | import org.junit.runners.model.Statement;
10 | import org.mockito.Mock;
11 | import org.mockito.junit.MockitoJUnitRunner;
12 | 
13 | @RunWith(MockitoJUnitRunner.StrictStubs.class)
14 | public class DefaultStatementTest extends TestCase {
15 | 
16 | @Mock
17 | private Statement statementMock;
18 | 
19 | @Test
20 | public void whenRunningDefaultStatement_thenUnderlyingStatementShouldBeCalled() throws Throwable {
21 | DefaultStatement defaultStatement = new DefaultStatement(statementMock);
22 | defaultStatement.evaluate();
23 | verify(statementMock).evaluate();
24 | }
25 | 
26 | @Test
27 | public void whenRunningDefaultStatementBefores_thenNothingShouldHappen() {
28 | DefaultStatement defaultStatement = new DefaultStatement(statementMock);
29 | defaultStatement.runBefores();
30 | verifyNoInteractions(statementMock);
31 | }
32 | 
33 | @Test
34 | public void whenRunningDefaultStatementAfters_thenNothingShouldHappen() {
35 | DefaultStatement defaultStatement = new DefaultStatement(statementMock);
36 | defaultStatement.runAfters();
37 | verifyNoInteractions(statementMock);
38 | }
39 | }
-------------------------------------------------------------------------------- /junitperf-junit5/pom.xml: --------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
3 | xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
4 | <parent>
5 | <artifactId>junitperf-parent</artifactId>
6 | <groupId>com.github.noconnor</groupId>
7 | <version>1.37.0-SNAPSHOT</version>
8 | </parent>
9 | <modelVersion>4.0.0</modelVersion>
10 | <artifactId>junitperf-junit5</artifactId>
11 | 
12 | <properties>
13 | <junit.jupiter.version>5.9.0</junit.jupiter.version>
14 | <junit.jupiter.suite.api.version>1.9.3</junit.jupiter.suite.api.version>
15 | <mockito.junit.version>3.6.28</mockito.junit.version>
16 | </properties>
17 | 
18 | <dependencies>
19 | <dependency>
20 | <groupId>com.github.noconnor</groupId>
21 | <artifactId>junitperf-core</artifactId>
22 | <version>1.37.0-SNAPSHOT</version>
23 | </dependency>
24 | <dependency>
25 | <groupId>org.junit.jupiter</groupId>
26 | <artifactId>junit-jupiter</artifactId>
27 | <version>${junit.jupiter.version}</version>
28 | </dependency>
29 | <dependency>
30 | <groupId>org.mockito</groupId>
31 | <artifactId>mockito-junit-jupiter</artifactId>
32 | <version>${mockito.junit.version}</version>
33 | <scope>test</scope>
34 | </dependency>
35 | <dependency>
36 | <groupId>org.junit.platform</groupId>
37 | <artifactId>junit-platform-suite-api</artifactId>
38 | <version>${junit.jupiter.suite.api.version}</version>
39 | <scope>test</scope>
40 | </dependency>
41 | </dependencies>
42 | 
43 | <build>
44 | <plugins>
45 | <plugin>
46 | <groupId>org.jacoco</groupId>
47 | <artifactId>jacoco-maven-plugin</artifactId>
48 | </plugin>
49 | </plugins>
50 | </build>
51 | 
52 | </project>
-------------------------------------------------------------------------------- /junitperf-junit5/src/main/java/com/github/noconnor/junitperf/JUnitPerfReportingConfig.java: --------------------------------------------------------------------------------
1 | package com.github.noconnor.junitperf;
2 | 
3 | import java.util.Collection;
4 | import java.util.function.Supplier;
5 | 
6 | import com.github.noconnor.junitperf.reporting.ReportGenerator;
7 | import com.github.noconnor.junitperf.statistics.StatisticsCalculator;
8 | 
9 | import com.github.noconnor.junitperf.statistics.providers.DescriptiveStatisticsCalculator;
10 | import lombok.Builder;
11 | import lombok.Singular;
12 | import lombok.Value;
13 | 
14 | @Value
15 | @Builder
16 | public class JUnitPerfReportingConfig {
17 | @Singular
18 | Collection<ReportGenerator> reportGenerators;
19 | @Builder.Default
20 | Supplier<StatisticsCalculator> statisticsCalculatorSupplier = DescriptiveStatisticsCalculator::new;
21 | }
22 | 
-------------------------------------------------------------------------------- /junitperf-junit5/src/main/java/com/github/noconnor/junitperf/JUnitPerfTestActiveConfig.java: --------------------------------------------------------------------------------
1 | package com.github.noconnor.junitperf;
2 | 
3 | import java.lang.annotation.ElementType;
4 | import java.lang.annotation.Retention;
5 | import java.lang.annotation.RetentionPolicy;
6 | import java.lang.annotation.Target;
7 | 
8 | @Retention(RetentionPolicy.RUNTIME)
9 | @Target({
--------------------------------------------------------------------------------
/junitperf-junit5/src/main/java/com/github/noconnor/junitperf/TestContextSupplier.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf;

import com.github.noconnor.junitperf.data.NoOpTestContext;
import com.github.noconnor.junitperf.data.TestContext;
import com.github.noconnor.junitperf.statistics.StatisticsCalculator;
import lombok.RequiredArgsConstructor;

import static java.lang.System.currentTimeMillis;

@RequiredArgsConstructor
public class TestContextSupplier {

    private final long measurementsStartTimeMs;
    private final StatisticsCalculator statsCalculator;

    public TestContext startMeasurement() {
        return hasMeasurementStarted() ? new TestContext(statsCalculator) : NoOpTestContext.INSTANCE;
    }

    private boolean hasMeasurementStarted() {
        return currentTimeMillis() >= measurementsStartTimeMs;
    }
}
--------------------------------------------------------------------------------
/junitperf-junit5/src/main/java/com/github/noconnor/junitperf/statements/FullStatement.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf.statements;

import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Setter;

import java.lang.reflect.Method;
import java.util.List;

import static java.util.Collections.emptyList;

@RequiredArgsConstructor
public class FullStatement implements TestStatement {

    @Getter
    @Setter
    private List<Method> beforeEach = emptyList();
    @Getter
    @Setter
    private List<Method> afterEach = emptyList();

    private final Object testClass;
    private final Method testMethod;
    private final List<Object> args;

    @Override
    public void runBefores() throws Throwable {
        for (Method m : beforeEach) {
            m.setAccessible(true);
            m.invoke(testClass);
        }
    }

    @Override
    public void evaluate() throws Throwable {
        testMethod.setAccessible(true);
        testMethod.invoke(testClass, args.toArray());
    }

    @Override
    public void runAfters() throws Throwable {
        for (Method m : afterEach) {
            m.setAccessible(true);
            m.invoke(testClass);
        }
    }
}
--------------------------------------------------------------------------------
/junitperf-junit5/src/main/java/com/github/noconnor/junitperf/suite/SuiteRegistry.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf.suite;

import com.github.noconnor.junitperf.JUnitPerfReportingConfig;
import com.github.noconnor.junitperf.JUnitPerfTest;
import com.github.noconnor.junitperf.JUnitPerfTestActiveConfig;
import com.github.noconnor.junitperf.JUnitPerfTestRequirement;
import lombok.Builder;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.extension.ExtensionContext;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static java.util.Objects.isNull;
import static java.util.Objects.nonNull;

@Slf4j
public class SuiteRegistry {

    private static final Map<String, SuiteSettings> settingsCache = new HashMap<>();
    private static final Pattern suiteClassPattern = Pattern.compile("\\[suite:([^\\]]*)\\]");

    public static void scanForSuiteDetails(ExtensionContext context) {

        String rootUniqueId = getRootId(context);
        Class<?> clazz = getTopLevelSuiteClass(rootUniqueId);

        if (isNull(clazz) || settingsCache.containsKey(rootUniqueId)) {
            return;
        }

        JUnitPerfTest testSpec = clazz.getAnnotation(JUnitPerfTest.class);
        JUnitPerfTestRequirement requirements = clazz.getAnnotation(JUnitPerfTestRequirement.class);
        JUnitPerfReportingConfig reportingConfig = Arrays.stream(clazz.getFields())
                .filter(f -> f.isAnnotationPresent(JUnitPerfTestActiveConfig.class))
                .map(f -> {
                    warnIfNonStatic(f);
                    return getFieldValue(f);
                })
                .filter(Objects::nonNull)
                .findFirst()
                .orElse(null);
        SuiteSettings suiteSettings = SuiteSettings.builder()
                .perfTestSpec(testSpec)
                .requirements(requirements)
                .reportingConfig(reportingConfig)
                .build();

        settingsCache.put(rootUniqueId, suiteSettings);
    }

    public static void clearRegistry() {
        settingsCache.clear();
    }

    public static JUnitPerfReportingConfig getReportingConfig(ExtensionContext context) {
        SuiteSettings s = settingsCache.get(getRootId(context));
        return nonNull(s) ? s.getReportingConfig() : null;
    }

    public static JUnitPerfTest getPerfTestData(ExtensionContext context) {
        SuiteSettings s = settingsCache.get(getRootId(context));
        return nonNull(s) ? s.getPerfTestSpec() : null;
    }

    public static JUnitPerfTestRequirement getPerfRequirements(ExtensionContext context) {
        SuiteSettings s = settingsCache.get(getRootId(context));
        return nonNull(s) ? s.getRequirements() : null;
    }

    private static String getRootId(ExtensionContext context) {
        if (nonNull(context) && nonNull(context.getRoot())) {
            return context.getRoot().getUniqueId();
        }
        return "";
    }

    private static Class<?> getTopLevelSuiteClass(String rootUniqueId) {
        Matcher m = suiteClassPattern.matcher(rootUniqueId);
        if (m.find()) { // find first match - root suite
            try {
                return Class.forName(m.group(1));
            } catch (ClassNotFoundException e) {
                log.warn("Suite class not found: {}", rootUniqueId);
            }
        }
        return null;
    }

    private static JUnitPerfReportingConfig getFieldValue(Field f) {
        try {
            f.setAccessible(true);
            return (JUnitPerfReportingConfig) f.get(null);
        } catch (Exception e) {
            log.error("Unable to access JUnitPerfReportingConfig, make sure config is a static variable", e);
        }
        return null;
    }

    private static void warnIfNonStatic(Field f) {
        if (!Modifier.isStatic(f.getModifiers())) {
            log.warn("JUnitPerfReportingConfig must be static for test suites");
        }
    }

    @Value
    @Builder
    private static class SuiteSettings {
        JUnitPerfTest perfTestSpec;
        JUnitPerfTestRequirement requirements;
        JUnitPerfReportingConfig reportingConfig;
    }
}
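// --- Editorial sketch (not a repository file) ------------------------------------
// Demonstrates the uniqueId shape that SuiteRegistry.suiteClassPattern parses.
// Matcher.find() stops at the first "[suite:...]" segment, so for a suite-of-suites
// the outermost suite class wins — the same behaviour SuiteRegistryTest verifies below.
// The com.example.* class names are hypothetical.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class SuiteIdParsingSketch {
    public static void main(String[] args) {
        Pattern suiteClassPattern = Pattern.compile("\\[suite:([^\\]]*)\\]");
        String rootId = "[engine:junit-platform-suite]/[suite:com.example.OuterSuite]"
                + "/[engine:junit-platform-suite]/[suite:com.example.InnerSuite]"
                + "/[engine:junit-jupiter]";
        Matcher m = suiteClassPattern.matcher(rootId);
        if (m.find()) {
            System.out.println(m.group(1)); // prints com.example.OuterSuite
        }
    }
}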
--------------------------------------------------------------------------------
/junitperf-junit5/src/main/java/com/github/noconnor/junitperf/utils/TestReflectionUtils.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf.utils;

import lombok.experimental.UtilityClass;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.platform.commons.util.ReflectionUtils;

import java.lang.reflect.Method;
import java.util.List;

@UtilityClass
public class TestReflectionUtils {

    public static List<Method> findBeforeEach(Object testClass) {
        return ReflectionUtils.findMethods(testClass.getClass(), m -> m.isAnnotationPresent(BeforeEach.class));
    }

    public static List<Method> findAfterEach(Object testClass) {
        return ReflectionUtils.findMethods(testClass.getClass(), m -> m.isAnnotationPresent(AfterEach.class));
    }
}
--------------------------------------------------------------------------------
/junitperf-junit5/src/main/resources/META-INF/services/org.junit.jupiter.api.extension.Extension:
--------------------------------------------------------------------------------
com.github.noconnor.junitperf.JUnitPerfInterceptor
--------------------------------------------------------------------------------
/junitperf-junit5/src/test/java/com/github/noconnor/junitperf/JUnitPerfReportingConfigTest.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf;

import com.github.noconnor.junitperf.reporting.ReportGenerator;
import com.github.noconnor.junitperf.reporting.providers.ConsoleReportGenerator;
import com.github.noconnor.junitperf.reporting.providers.HtmlReportGenerator;
import com.github.noconnor.junitperf.statistics.StatisticsCalculator;
import com.github.noconnor.junitperf.statistics.providers.DescriptiveStatisticsCalculator;
import org.junit.jupiter.api.Test;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;

import java.util.function.Supplier;

import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

@MockitoSettings(strictness = Strictness.LENIENT)
class JUnitPerfReportingConfigTest {

    @Test
    void whenNoReportersAreAddedToConfig_thenConfigShouldContainEmptyReportersCollection() {
        JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder()
                .build();
        assertEquals(0, config.getReportGenerators().size());
    }

    @Test
    void whenReportersAreAddedToConfig_thenReportersShouldBeRetrievable() {
        ReportGenerator reporter1 = new ConsoleReportGenerator();
        ReportGenerator reporter2 = new HtmlReportGenerator();

        JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder()
                .reportGenerator(reporter1)
                .reportGenerator(reporter2)
                .build();

        assertEquals(2, config.getReportGenerators().size());
        assertTrue(config.getReportGenerators().contains(reporter1));
        assertTrue(config.getReportGenerators().contains(reporter2));
    }

    @SuppressWarnings("unchecked")
    @Test
    void whenStatisticsCalculatorSupplierIsSpecified_thenStatisticsCalculatorSupplierShouldBeCallable() {
        Supplier<StatisticsCalculator> calcMock = mock(Supplier.class);

        JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder()
                .statisticsCalculatorSupplier(calcMock)
                .build();

        assertNotNull(config.getStatisticsCalculatorSupplier());

        config.getStatisticsCalculatorSupplier().get();
        verify(calcMock).get();
    }

    @Test
    void whenNoStatisticsCalculatorSupplierIsSpecified_thenDefaultStatisticsCalculatorSupplierShouldBeCallable() {
        JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder()
                .build();

        assertNotNull(config.getStatisticsCalculatorSupplier());
        assertTrue(config.getStatisticsCalculatorSupplier().get() instanceof DescriptiveStatisticsCalculator);
    }

}
--------------------------------------------------------------------------------
/junitperf-junit5/src/test/java/com/github/noconnor/junitperf/TestContextSupplierTest.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf;

import com.github.noconnor.junitperf.data.NoOpTestContext;
import com.github.noconnor.junitperf.statistics.StatisticsCalculator;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;

import static java.lang.System.currentTimeMillis;
import static org.junit.jupiter.api.Assertions.*;

@MockitoSettings(strictness = Strictness.LENIENT)
class TestContextSupplierTest {

    private TestContextSupplier supplier;

    @Mock
    private StatisticsCalculator statsCalcMock;

    @Test
    void whenMeasurementsHaveNotStarted_thenNoOpContextShouldBeReturned() {
        supplier = new TestContextSupplier(currentTimeMillis() + 10_000, statsCalcMock);
        assertEquals(NoOpTestContext.INSTANCE, supplier.startMeasurement());
    }

    @Test
    void whenMeasurementsHaveStarted_thenNewTestContextShouldBeReturned() {
        supplier = new TestContextSupplier(currentTimeMillis() - 10_000, statsCalcMock);
        assertNotNull(supplier.startMeasurement());
        assertNotEquals(NoOpTestContext.INSTANCE, supplier.startMeasurement());
    }
}
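// --- Editorial sketch (not a repository file) ------------------------------------
// Shows the warm-up gating that TestContextSupplierTest exercises above: the caller
// passes the wall-clock instant at which measurements should begin (e.g. "now plus the
// warm-up window"), and until that instant every startMeasurement() call yields the
// shared NoOpTestContext, so warm-up iterations never reach the statistics calculator.
// The class name and warm-up value below are hypothetical.

import com.github.noconnor.junitperf.TestContextSupplier;
import com.github.noconnor.junitperf.data.TestContext;
import com.github.noconnor.junitperf.statistics.providers.DescriptiveStatisticsCalculator;

public class WarmupGatingSketch {
    public static void main(String[] args) {
        long warmUpMs = 5_000;
        TestContextSupplier supplier = new TestContextSupplier(
                System.currentTimeMillis() + warmUpMs, new DescriptiveStatisticsCalculator());

        TestContext ctx = supplier.startMeasurement(); // NoOpTestContext.INSTANCE for the next ~5s
        ctx.success(); // safe no-op: nothing is recorded during warm-up
    }
}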
--------------------------------------------------------------------------------
/junitperf-junit5/src/test/java/com/github/noconnor/junitperf/statements/FullStatementTest.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf.statements;

import lombok.Getter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

import static java.util.Collections.emptyList;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;

class FullStatementTest {

    private FullStatement statement;

    private MockTestInstance testInstanceMock;

    @BeforeEach
    void setup() throws NoSuchMethodException {
        testInstanceMock = new MockTestInstance();
        Method testMethod = MockTestInstance.class.getDeclaredMethod("someMethod");
        statement = new FullStatement(testInstanceMock, testMethod, emptyList());
    }

    @Test
    void whenEvaluateIsCalled_thenMethodShouldBeInvoked() throws Throwable {
        statement.evaluate();
        assertEquals(1, testInstanceMock.getInvocationCount().get());
    }

    @Test
    void whenRunBeforeIsCalled_andNoBeforeMethodsExist_thenNoExceptionsShouldBeThrown() {
        assertDoesNotThrow(() -> statement.runBefores());
    }

    @Test
    void whenRunAfterIsCalled_andNoAfterMethodsExist_thenNoExceptionsShouldBeThrown() {
        assertDoesNotThrow(() -> statement.runAfters());
    }

    @Test
    void whenRunBeforeIsCalled_andBeforeMethodsExist_thenMethodsShouldBeCalled() throws NoSuchMethodException {
        Method before1Method = MockTestInstance.class.getDeclaredMethod("before1");
        Method before2Method = MockTestInstance.class.getDeclaredMethod("before2");
        List<Method> beforeMethods = new ArrayList<>();
        beforeMethods.add(before1Method);
        beforeMethods.add(before2Method);

        statement.setBeforeEach(beforeMethods);
        assertDoesNotThrow(() -> statement.runBefores());
        assertEquals(1, testInstanceMock.getBefore1Count().get());
        assertEquals(1, testInstanceMock.getBefore2Count().get());
    }

    @Test
    void whenRunAfterIsCalled_andAfterMethodsExist_thenMethodsShouldBeCalled() throws NoSuchMethodException {
        Method after1Method = MockTestInstance.class.getDeclaredMethod("after1");
        Method after2Method = MockTestInstance.class.getDeclaredMethod("after2");
        List<Method> afterMethods = new ArrayList<>();
        afterMethods.add(after1Method);
        afterMethods.add(after2Method);

        statement.setAfterEach(afterMethods);
        assertDoesNotThrow(() -> statement.runAfters());
        assertEquals(1, testInstanceMock.getAfter1Count().get());
        assertEquals(1, testInstanceMock.getAfter2Count().get());
    }

    public static class MockTestInstance {
        @Getter
        private final AtomicInteger invocationCount = new AtomicInteger();
        @Getter
        private final AtomicInteger before1Count = new AtomicInteger();
        @Getter
        private final AtomicInteger before2Count = new AtomicInteger();
        @Getter
        private final AtomicInteger after1Count = new AtomicInteger();
        @Getter
        private final AtomicInteger after2Count = new AtomicInteger();

        // invoked reflectively by FullStatement, which calls setAccessible(true) first
        private void someMethod() {
            invocationCount.incrementAndGet();
        }

        private void before1() {
            before1Count.incrementAndGet();
        }

        private void before2() {
            before2Count.incrementAndGet();
        }

        private void after1() {
            after1Count.incrementAndGet();
        }

        private void after2() {
            after2Count.incrementAndGet();
        }
    }

}
--------------------------------------------------------------------------------
/junitperf-junit5/src/test/java/com/github/noconnor/junitperf/suite/SuiteRegistryTest.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf.suite;

import com.github.noconnor.junitperf.JUnitPerfReportingConfig;
import com.github.noconnor.junitperf.JUnitPerfTest;
import com.github.noconnor.junitperf.JUnitPerfTestActiveConfig;
import com.github.noconnor.junitperf.JUnitPerfTestRequirement;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.platform.suite.api.SelectClasses;
import org.junit.platform.suite.api.Suite;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class SuiteRegistryTest {

    @BeforeEach
    void setup() {
        SuiteRegistry.clearRegistry();
    }

    @Test
    void whenNoTestSuiteClassIsConfigured_thenNoSuiteSettingsShouldBeRegistered() {
        ExtensionContext context = createMockExtensionContext("[engine:junit-jupiter]");
        SuiteRegistry.scanForSuiteDetails(context);
        assertNull(SuiteRegistry.getPerfTestData(context));
        assertNull(SuiteRegistry.getPerfRequirements(context));
        assertNull(SuiteRegistry.getReportingConfig(context));
    }

    @Test
    void whenInvalidTestSuiteClassIsConfigured_thenSuiteShouldNotBeIdentified() {
        ExtensionContext context = createMockExtensionContext(buildSuiteId("com.does.not.Exist"));
        SuiteRegistry.scanForSuiteDetails(context);
        assertNull(SuiteRegistry.getPerfTestData(context));
        assertNull(SuiteRegistry.getPerfRequirements(context));
        assertNull(SuiteRegistry.getReportingConfig(context));
    }

    @Test
    void whenTestSuiteClassIsConfigured_butSuiteHasNoAnnotations_thenSuiteShouldBeIdentifiedButNoPerfDataShouldBeAvailable() {
        ExtensionContext context = createMockExtensionContext(buildSuiteId(DummySuiteNoAnnotations.class));
        SuiteRegistry.scanForSuiteDetails(context);
        assertNull(SuiteRegistry.getPerfTestData(context));
        assertNull(SuiteRegistry.getPerfRequirements(context));
        assertNull(SuiteRegistry.getReportingConfig(context));
    }

    @Test
    void whenTestSuiteClassIsConfigured_andSuiteHasPerfAnnotation_thenSuitePerfDataShouldBeAvailable() {
        ExtensionContext context = createMockExtensionContext(buildSuiteId(DummySuitePerfTestAnnotation.class));
        SuiteRegistry.scanForSuiteDetails(context);
        JUnitPerfTest testSpec = SuiteRegistry.getPerfTestData(context);

        assertNotNull(testSpec);
        assertNull(SuiteRegistry.getPerfRequirements(context));
        assertNull(SuiteRegistry.getReportingConfig(context));
        assertEquals(40, testSpec.totalExecutions());
    }

    @Test
    void whenTestSuiteClassIsConfigured_andSuiteHasAllPerfAnnotations_thenSuitePerfDataShouldBeAvailable() {
        ExtensionContext context = createMockExtensionContext(buildSuiteId(DummySuitePerfTestAllAnnotations.class));
        SuiteRegistry.scanForSuiteDetails(context);
        JUnitPerfTest testSpec = SuiteRegistry.getPerfTestData(context);
        JUnitPerfTestRequirement requirements = SuiteRegistry.getPerfRequirements(context);

        assertNotNull(testSpec);
        assertNotNull(requirements);
        assertNull(SuiteRegistry.getReportingConfig(context));

        assertEquals(3, testSpec.totalExecutions());
        assertEquals(0.03F, requirements.allowedErrorPercentage());
    }

    @Test
    void whenTestSuiteClassIsConfigured_andSuiteHasAllPerfAnnotationsAndReportingConfig_thenSuitePerfDataShouldBeAvailable() {
        ExtensionContext context = createMockExtensionContext(buildSuiteId(DummySuiteAllConfigs.class));
        SuiteRegistry.scanForSuiteDetails(context);
        JUnitPerfTest testSpec = SuiteRegistry.getPerfTestData(context);
        JUnitPerfTestRequirement requirements = SuiteRegistry.getPerfRequirements(context);
        JUnitPerfReportingConfig reportConfig = SuiteRegistry.getReportingConfig(context);

        assertNotNull(testSpec);
        assertNotNull(requirements);
        assertNotNull(reportConfig);

        assertEquals(53, testSpec.totalExecutions());
        assertEquals(0.13F, requirements.allowedErrorPercentage());
        assertEquals(DummySuiteAllConfigs.config, reportConfig);
    }

    @Test
    void whenTestSuiteClassIsConfigured_andSuiteHasBadReporterConfig_thenSuitePerfDataShouldBeAvailable_butReporterConfigWillBeMissing() {
        ExtensionContext context = createMockExtensionContext(buildSuiteId(DummySuiteBadReporterConfigs.class));
        SuiteRegistry.scanForSuiteDetails(context);
        JUnitPerfTest testSpec = SuiteRegistry.getPerfTestData(context);
        JUnitPerfTestRequirement requirements = SuiteRegistry.getPerfRequirements(context);
        JUnitPerfReportingConfig reportConfig = SuiteRegistry.getReportingConfig(context);

        assertNotNull(testSpec);
        assertNotNull(requirements);
        assertNull(reportConfig);

        assertEquals(345, testSpec.totalExecutions());
        assertEquals(0.168F, requirements.allowedErrorPercentage());
    }

    @Test
    void whenTestSuiteIsASuiteOfSuites_andTopLevelSuiteHasPerfAnnotation_thenSuitePerfDataShouldBeAvailable() {
        ExtensionContext context = createMockExtensionContext(buildSuiteOfSuitesId(DummySuiteOfSuites.class, DummySuite.class));
        SuiteRegistry.scanForSuiteDetails(context);
        JUnitPerfTest testSpec = SuiteRegistry.getPerfTestData(context);
        JUnitPerfTestRequirement requirements = SuiteRegistry.getPerfRequirements(context);

        assertNotNull(testSpec);
        assertNotNull(requirements);
        assertNotNull(SuiteRegistry.getReportingConfig(context));

        assertEquals(0.198F, requirements.allowedErrorPercentage());
        assertEquals(376, testSpec.totalExecutions());
    }

    private static String buildSuiteId(Class<?> clazz) {
        return buildSuiteId(clazz.getName());
    }

    private static String buildSuiteId(String clazz) {
        return "[engine:junit-platform-suite]/[suite:" + clazz + "]/[engine:junit-jupiter]";
    }

    private static String buildSuiteOfSuitesId(Class<?> clazz1, Class<?> clazz2) {
        return buildSuiteOfSuitesId(clazz1.getName(), clazz2.getName());
    }

    private static String buildSuiteOfSuitesId(String suiteClazz1, String suiteClazz2) {
        return "[engine:junit-platform-suite]/[suite:" + suiteClazz1 + "]/[engine:junit-platform-suite]/[suite:" + suiteClazz2 + "]/[engine:junit-jupiter]";
    }

    private static ExtensionContext createMockExtensionContext(String rootId) {
        ExtensionContext childContext = mock(ExtensionContext.class);
        ExtensionContext rootContext = mock(ExtensionContext.class);
        when(childContext.getRoot()).thenReturn(rootContext);
        when(rootContext.getUniqueId()).thenReturn(rootId);
        return childContext;
    }


    @Disabled
    @Suite
    public static class DummySuiteNoAnnotations {
    }

    @Disabled
    @Suite
    @JUnitPerfTest(totalExecutions = 40)
    public static class DummySuitePerfTestAnnotation {
    }

    @Disabled
    @Suite
    @JUnitPerfTest(totalExecutions = 3)
    @JUnitPerfTestRequirement(allowedErrorPercentage = 0.03F)
    public static class DummySuitePerfTestAllAnnotations {
    }

    @Disabled
    @Suite
    @JUnitPerfTest(totalExecutions = 53)
    @JUnitPerfTestRequirement(allowedErrorPercentage = 0.13F)
    public static class DummySuiteAllConfigs {
        @JUnitPerfTestActiveConfig
        public static JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder().build();
    }

    @Disabled
    @Suite
    @JUnitPerfTest(totalExecutions = 345)
    @JUnitPerfTestRequirement(allowedErrorPercentage = 0.168F)
    public static class DummySuiteBadReporterConfigs {
        @JUnitPerfTestActiveConfig // not static - should be dropped
        public JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder().build();
    }

    @Disabled
    @Suite
    @SelectClasses(DummySuite.class)
    @JUnitPerfTest(totalExecutions = 376)
    @JUnitPerfTestRequirement(allowedErrorPercentage = 0.198F)
    public static class DummySuiteOfSuites {
        @JUnitPerfTestActiveConfig
        public static JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder().build();
    }

    @Disabled
    @Suite
    @SelectClasses(DummyTestClass.class)
    public static class DummySuite {
    }

    @Disabled
    public static class DummyTestClass {

        @Test
        void someTest() {
            assertTrue(true);
        }
    }
}
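// --- Editorial sketch (not a repository file) ------------------------------------
// A complete performance suite of the shape SuiteRegistryTest drives above: class-level
// @JUnitPerfTest/@JUnitPerfTestRequirement apply to every test the suite selects, and the
// reporting config field must be public static or SuiteRegistry drops it (see
// warnIfNonStatic/getFieldValue). The suite name, member class and numbers are hypothetical.

import com.github.noconnor.junitperf.JUnitPerfReportingConfig;
import com.github.noconnor.junitperf.JUnitPerfTest;
import com.github.noconnor.junitperf.JUnitPerfTestActiveConfig;
import com.github.noconnor.junitperf.JUnitPerfTestRequirement;
import org.junit.platform.suite.api.SelectClasses;
import org.junit.platform.suite.api.Suite;

@Suite
@SelectClasses(ExamplePerfSuiteSketch.ExampleTestClass.class)
@JUnitPerfTest(totalExecutions = 1_000, threads = 4)
@JUnitPerfTestRequirement(allowedErrorPercentage = 0.05F)
public class ExamplePerfSuiteSketch {

    @JUnitPerfTestActiveConfig
    public static JUnitPerfReportingConfig config = JUnitPerfReportingConfig.builder().build();

    public static class ExampleTestClass {
        @org.junit.jupiter.api.Test
        void someTest() {
        }
    }
}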
--------------------------------------------------------------------------------
/junitperf-junit5/src/test/java/com/github/noconnor/junitperf/utils/TestReflectionUtilsTest.java:
--------------------------------------------------------------------------------
package com.github.noconnor.junitperf.utils;


import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import java.lang.reflect.Method;
import java.util.List;

import static java.util.Collections.emptyList;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class TestReflectionUtilsTest {

    @Test
    void whenClassContainsNoBeforeAndAfterMethods_thenEmptyListShouldBeReturned() {
        NoBeforeAfters test = new NoBeforeAfters();
        assertEquals(emptyList(), TestReflectionUtils.findBeforeEach(test));
        assertEquals(emptyList(), TestReflectionUtils.findAfterEach(test));
    }

    @Test
    void whenClassContainsBeforeEach_thenNonEmptyBeforeEachListShouldBeReturned() {
        BeforeEachClass test = new BeforeEachClass();
        assertEquals(emptyList(), TestReflectionUtils.findAfterEach(test));
        List<Method> beforeMethods = TestReflectionUtils.findBeforeEach(test);
        assertEquals(1, beforeMethods.size());
        assertEquals("setupBeforeEachClass", beforeMethods.get(0).getName());
        assertDoesNotThrow(() -> beforeMethods.get(0).invoke(test));
    }

    @Test
    void whenClassContainsAfterEach_thenNonEmptyAfterEachListShouldBeReturned() {
        AfterEachClass test = new AfterEachClass();
        assertEquals(emptyList(), TestReflectionUtils.findBeforeEach(test));
        List<Method> afterMethods = TestReflectionUtils.findAfterEach(test);
        assertEquals(1, afterMethods.size());
        assertEquals("tearDownAfterEachClass", afterMethods.get(0).getName());
        assertDoesNotThrow(() -> afterMethods.get(0).invoke(test));
    }

    @Test
    void whenClassContainsBeforeAndAfterEach_thenNonEmptyBeforeAndAfterEachListShouldBeReturned() {
        BeforeAndAfterClass test = new BeforeAndAfterClass();
        List<Method> beforeMethods = TestReflectionUtils.findBeforeEach(test);
        List<Method> afterMethods = TestReflectionUtils.findAfterEach(test);
        assertEquals(1, beforeMethods.size());
        assertEquals(1, afterMethods.size());
        assertEquals("setupBeforeAndAfterClass", beforeMethods.get(0).getName());
        assertEquals("tearDownBeforeAndAfterClass", afterMethods.get(0).getName());
        assertDoesNotThrow(() -> beforeMethods.get(0).invoke(test));
        assertDoesNotThrow(() -> afterMethods.get(0).invoke(test));
    }

    @Test
    void whenClassContainsMultipleBeforeAndAfterEach_thenNonEmptyBeforeAndAfterEachListShouldBeReturned() {
        MultipleBeforeAndAfterClass test = new MultipleBeforeAndAfterClass();
        List<Method> beforeMethods = TestReflectionUtils.findBeforeEach(test);
        List<Method> afterMethods = TestReflectionUtils.findAfterEach(test);
        assertEquals(2, beforeMethods.size());
        assertEquals(2, afterMethods.size());
    }

    @Disabled
    public static class NoBeforeAfters {
    }

    @Disabled
    public static class BeforeEachClass {
        @BeforeEach
        void setupBeforeEachClass(){
        }
    }

    @Disabled
    public static class AfterEachClass {
        @AfterEach
        void tearDownAfterEachClass(){
        }
    }

    @Disabled
    public static class BeforeAndAfterClass {
        @BeforeEach
        void setupBeforeAndAfterClass(){
        }
        @AfterEach
        void tearDownBeforeAndAfterClass(){
        }
    }

    @Disabled
    public static class MultipleBeforeAndAfterClass {
        @BeforeEach
        void setup1BeforeAndAfterClass(){
        }
        @BeforeEach
        void setup2BeforeAndAfterClass(){
        }
        @AfterEach
        void tearDown1BeforeAndAfterClass(){
        }
        @AfterEach
        void tearDown2BeforeAndAfterClass(){
        }
    }
}
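// --- Editorial sketch (not a repository file) ------------------------------------
// Ties FullStatement and TestReflectionUtils together: lifecycle methods discovered via
// reflection are attached to the statement, after which one measured iteration runs as
// runBefores() -> evaluate() -> runAfters(), mirroring a normal Jupiter invocation.
// SomeJupiterTest and its method names are hypothetical.

import com.github.noconnor.junitperf.statements.FullStatement;
import com.github.noconnor.junitperf.utils.TestReflectionUtils;

import java.lang.reflect.Method;
import java.util.Collections;

public class FullStatementWiringSketch {
    public static void main(String[] args) throws Throwable {
        SomeJupiterTest testInstance = new SomeJupiterTest();
        Method testMethod = SomeJupiterTest.class.getDeclaredMethod("someTest");

        FullStatement statement = new FullStatement(testInstance, testMethod, Collections.emptyList());
        statement.setBeforeEach(TestReflectionUtils.findBeforeEach(testInstance));
        statement.setAfterEach(TestReflectionUtils.findAfterEach(testInstance));

        statement.runBefores();
        statement.evaluate();
        statement.runAfters();
    }

    public static class SomeJupiterTest {
        @org.junit.jupiter.api.BeforeEach
        void setup() {
        }

        @org.junit.jupiter.api.Test
        void someTest() {
        }

        @org.junit.jupiter.api.AfterEach
        void tearDown() {
        }
    }
}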
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.github.noconnor</groupId>
    <artifactId>junitperf-parent</artifactId>
    <version>1.37.0-SNAPSHOT</version>
    <packaging>pom</packaging>

    <name>junitperf</name>
    <description>API performance testing framework built using JUnit</description>
    <url>https://github.com/noconnor/JUnitPerf</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <mockito.version>3.8.0</mockito.version>
        <hamcrest.version>2.0.0.0</hamcrest.version>
        <!-- The names of the remaining version properties were stripped with the XML
             tags in this dump; the surviving values were, in order:
             1.18.28, 3.11, 1.3.0, 30.1-jre, 3.2.2, 3.6.1 -->
        <jacoco-plugin-version>0.8.10</jacoco-plugin-version>
    </properties>

    <modules>
        <module>junit4-examples</module>
        <module>junit5-examples</module>
        <module>junitperf-core</module>
        <module>junitperf-junit4</module>
        <module>junitperf-junit5</module>
    </modules>

    <dependencies>
        <dependency>
            <groupId>org.mockito</groupId>
            <artifactId>mockito-core</artifactId>
            <version>${mockito.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.hamcrest</groupId>
            <artifactId>hamcrest-junit</artifactId>
            <version>${hamcrest.version}</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>3.0.0-M7</version>
            </plugin>
            <plugin>
                <artifactId>maven-deploy-plugin</artifactId>
                <version>2.8.2</version>
                <executions>
                    <execution>
                        <id>default-deploy</id>
                        <phase>deploy</phase>
                        <goals>
                            <goal>deploy</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-release-plugin</artifactId>
                <version>3.0.0-M1</version>
                <!-- Configuration element names reconstructed from the surviving values:
                     release-sign-artifacts, true, false, forked-path, -DskipTests -->
                <configuration>
                    <releaseProfiles>release-sign-artifacts</releaseProfiles>
                    <autoVersionSubmodules>true</autoVersionSubmodules>
                    <useReleaseProfile>false</useReleaseProfile>
                    <mavenExecutorId>forked-path</mavenExecutorId>
                    <arguments>-DskipTests</arguments>
                </configuration>
                <dependencies>
                    <dependency>
                        <groupId>org.apache.maven.scm</groupId>
                        <artifactId>maven-scm-provider-gitexe</artifactId>
                        <version>1.11.2</version>
                    </dependency>
                    <dependency>
                        <groupId>org.apache.maven.scm</groupId>
                        <artifactId>maven-scm-api</artifactId>
                        <version>1.11.2</version>
                    </dependency>
                    <dependency>
                        <groupId>org.apache.maven.release</groupId>
                        <artifactId>maven-release-semver-policy</artifactId>
                        <version>3.0.0-M1</version>
                    </dependency>
                </dependencies>
            </plugin>
            <plugin>
                <groupId>org.sonatype.plugins</groupId>
                <artifactId>nexus-staging-maven-plugin</artifactId>
                <version>1.6.8</version>
                <extensions>true</extensions>
                <configuration>
                    <serverId>ossrh</serverId>
                    <nexusUrl>https://oss.sonatype.org/</nexusUrl>
                    <autoReleaseAfterClose>true</autoReleaseAfterClose>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
                <version>3.2.1</version>
                <executions>
                    <execution>
                        <id>attach-sources</id>
                        <goals>
                            <goal>jar</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-javadoc-plugin</artifactId>
                <version>3.2.0</version>
                <configuration>
                    <encoding>UTF-8</encoding>
                </configuration>
                <executions>
                    <execution>
                        <id>attach-javadoc</id>
                        <goals>
                            <goal>jar</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-gpg-plugin</artifactId>
                <version>3.2.7</version>
                <executions>
                    <execution>
                        <id>sign-artifacts</id>
                        <phase>verify</phase>
                        <goals>
                            <goal>sign</goal>
                        </goals>
                        <configuration>
                            <gpgArguments>
                                <arg>--pinentry-mode</arg>
                                <arg>loopback</arg>
                            </gpgArguments>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.jacoco</groupId>
                <artifactId>jacoco-maven-plugin</artifactId>
                <version>${jacoco-plugin-version}</version>
                <executions>
                    <execution>
                        <id>pre-unit-test</id>
                        <goals>
                            <goal>prepare-agent</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>post-unit-test</id>
                        <phase>test</phase>
                        <goals>
                            <goal>report</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>check</id>
                        <goals>
                            <goal>check</goal>
                        </goals>
                        <configuration>
                            <rules>
                                <rule>
                                    <element>BUNDLE</element>
                                </rule>
                            </rules>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

    <developers>
        <developer>
            <id>noconnor</id>
            <name>noconnor</name>
            <email>noconnorie@protonmail.com</email>
        </developer>
    </developers>

    <scm>
        <connection>scm:git:ssh://git@github.com:noconnor/JUnitPerf.git</connection>
        <developerConnection>scm:git:ssh://git@github.com:noconnor/JUnitPerf.git</developerConnection>
        <url>git@github.com:noconnor/JUnitPerf.git</url>
        <tag>1.17.1</tag>
    </scm>

    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
            <distribution>repo</distribution>
        </license>
    </licenses>

    <distributionManagement>
        <snapshotRepository>
            <id>junit-perf-ossrh</id>
            <url>https://oss.sonatype.org/content/repositories/snapshots</url>
        </snapshotRepository>
        <repository>
            <id>ossrh</id>
            <url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
        </repository>
    </distributionManagement>
</project>
--------------------------------------------------------------------------------