├── .circleci └── config.yml ├── .credo.exs ├── .formatter.exs ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ └── please--open-new-issues-in-membranefranework-membrane_core.md └── workflows │ ├── on_issue_opened.yaml │ └── on_pr_opened.yaml ├── .gitignore ├── LICENSE ├── README.md ├── examples ├── advanced.exs ├── attached │ ├── .formatter.exs │ ├── .gitignore │ ├── lib │ │ └── attached.ex │ ├── mix.exs │ ├── mix.lock │ ├── run_attached.exs │ └── start_counter.exs └── simple.exs ├── lib ├── beamchmark.ex └── beamchmark │ ├── formatter.ex │ ├── formatters │ ├── console.ex │ ├── html.ex │ ├── html │ │ └── templates.ex │ └── utils.ex │ ├── math.ex │ ├── scenario.ex │ ├── suite.ex │ ├── suite │ ├── configuration.ex │ ├── cpu │ │ └── cpu_task.ex │ ├── measurements.ex │ ├── measurements │ │ ├── cpu_info.ex │ │ ├── memory_info.ex │ │ └── scheduler_info.ex │ ├── memory │ │ └── memory_task.ex │ ├── scenarios │ │ └── empty_scenario.ex │ └── system_info.ex │ └── utils.ex ├── mix.exs ├── mix.lock ├── priv ├── assets │ ├── css │ │ └── beamchmark.css │ └── js │ │ └── plotly-2.9.0.min.js └── templates │ ├── configuration.html.eex │ ├── index.html.eex │ ├── measurements.html.eex │ └── system.html.eex └── test ├── beamchmark ├── application_test.exs ├── formatter_test.exs └── formatters │ ├── console_test.exs │ ├── html_test.exs │ └── utils_test.exs ├── cpu_test.exs ├── memory_test.exs ├── support ├── invalid_formatter.ex ├── mock_scenario.ex ├── spy_formatter.ex ├── test_utils.ex └── valid_formatter.ex └── test_helper.exs /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | orbs: 3 | elixir: membraneframework/elixir@1 4 | win: circleci/windows@4.1 5 | 6 | jobs: 7 | windows_test: 8 | executor: 9 | name: win/default 10 | size: medium 11 | shell: cmd.exe 12 | 13 | steps: 14 | - checkout 15 | 16 | - restore_cache: 17 | keys: 18 | [choco-cache] 19 | 20 | - run: 21 | name: Install Elixir 22 | command: choco install 
-y erlang elixir 23 | 24 | - save_cache: 25 | key: choco-cache 26 | paths: 27 | - C:\Users\circleci\AppData\Local\Temp 28 | 29 | - run: 30 | name: Get dependencies 31 | command: | 32 | refreshenv && C:\ProgramData\chocolatey\lib\Elixir\tools\bin\mix local.hex --force && C:\ProgramData\chocolatey\lib\Elixir\tools\bin\mix deps.get 33 | 34 | - run: 35 | name: Run all tests 36 | command: | 37 | refreshenv && C:\ProgramData\chocolatey\lib\Elixir\tools\bin\mix test --warnings-as-errors 38 | 39 | workflows: 40 | version: 2 41 | build: 42 | jobs: 43 | - elixir/build_test: 44 | filters: &filters 45 | tags: 46 | only: /v.*/ 47 | - elixir/test: 48 | filters: 49 | <<: *filters 50 | - elixir/lint: 51 | filters: 52 | <<: *filters 53 | - windows_test: 54 | filters: 55 | <<: *filters 56 | - elixir/hex_publish: 57 | requires: 58 | - elixir/build_test 59 | - elixir/test 60 | - elixir/lint 61 | - windows_test 62 | context: 63 | - Deployment 64 | filters: 65 | branches: 66 | ignore: /.*/ 67 | tags: 68 | only: /v.*/ 69 | 70 | -------------------------------------------------------------------------------- /.credo.exs: -------------------------------------------------------------------------------- 1 | # This file contains the configuration for Credo and you are probably reading 2 | # this after creating it with `mix credo.gen.config`. 3 | # 4 | # If you find anything wrong or unclear in this file, please report an 5 | # issue on GitHub: https://github.com/rrrene/credo/issues 6 | # 7 | %{ 8 | # 9 | # You can have as many configs as you like in the `configs:` field. 10 | configs: [ 11 | %{ 12 | # 13 | # Run any config using `mix credo -C `. If no config name is given 14 | # "default" is used. 15 | # 16 | name: "default", 17 | # 18 | # These are the files included in the analysis: 19 | files: %{ 20 | # 21 | # You can give explicit globs or simply directories. 22 | # In the latter case `**/*.{ex,exs}` will be used. 
23 | # 24 | included: [ 25 | "lib/", 26 | "src/", 27 | "test/", 28 | "web/", 29 | "apps/*/lib/", 30 | "apps/*/src/", 31 | "apps/*/test/", 32 | "apps/*/web/" 33 | ], 34 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] 35 | }, 36 | # 37 | # Load and configure plugins here: 38 | # 39 | plugins: [], 40 | # 41 | # If you create your own checks, you must specify the source files for 42 | # them here, so they can be loaded by Credo before running the analysis. 43 | # 44 | requires: [], 45 | # 46 | # If you want to enforce a style guide and need a more traditional linting 47 | # experience, you can change `strict` to `true` below: 48 | # 49 | strict: false, 50 | # 51 | # To modify the timeout for parsing files, change this value: 52 | # 53 | parse_timeout: 5000, 54 | # 55 | # If you want to use uncolored output by default, you can change `color` 56 | # to `false` below: 57 | # 58 | color: true, 59 | # 60 | # You can customize the parameters of any check by adding a second element 61 | # to the tuple. 62 | # 63 | # To disable a check put `false` as second element: 64 | # 65 | # {Credo.Check.Design.DuplicatedCode, false} 66 | # 67 | checks: [ 68 | # 69 | ## Consistency Checks 70 | # 71 | {Credo.Check.Consistency.ExceptionNames, []}, 72 | {Credo.Check.Consistency.LineEndings, []}, 73 | {Credo.Check.Consistency.ParameterPatternMatching, []}, 74 | {Credo.Check.Consistency.SpaceAroundOperators, []}, 75 | {Credo.Check.Consistency.SpaceInParentheses, []}, 76 | {Credo.Check.Consistency.TabsOrSpaces, []}, 77 | 78 | # 79 | ## Design Checks 80 | # 81 | # You can customize the priority of any check 82 | # Priority values are: `low, normal, high, higher` 83 | # 84 | {Credo.Check.Design.AliasUsage, 85 | [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]}, 86 | # You can also customize the exit_status of each check. 87 | # If you don't want TODO comments to cause `mix credo` to fail, just 88 | # set this value to 0 (zero). 
89 | # 90 | {Credo.Check.Design.TagTODO, [exit_status: 0]}, 91 | {Credo.Check.Design.TagFIXME, []}, 92 | 93 | # 94 | ## Readability Checks 95 | # 96 | {Credo.Check.Readability.AliasOrder, [priority: :normal]}, 97 | {Credo.Check.Readability.FunctionNames, []}, 98 | {Credo.Check.Readability.LargeNumbers, []}, 99 | {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]}, 100 | {Credo.Check.Readability.ModuleAttributeNames, []}, 101 | {Credo.Check.Readability.ModuleDoc, []}, 102 | {Credo.Check.Readability.ModuleNames, []}, 103 | {Credo.Check.Readability.ParenthesesInCondition, []}, 104 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, parens: true}, 105 | {Credo.Check.Readability.PredicateFunctionNames, []}, 106 | {Credo.Check.Readability.PreferImplicitTry, []}, 107 | {Credo.Check.Readability.RedundantBlankLines, []}, 108 | {Credo.Check.Readability.Semicolons, []}, 109 | {Credo.Check.Readability.SpaceAfterCommas, []}, 110 | {Credo.Check.Readability.StringSigils, []}, 111 | {Credo.Check.Readability.TrailingBlankLine, []}, 112 | {Credo.Check.Readability.TrailingWhiteSpace, []}, 113 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, 114 | {Credo.Check.Readability.VariableNames, []}, 115 | {Credo.Check.Readability.WithSingleClause, false}, 116 | 117 | # 118 | ## Refactoring Opportunities 119 | # 120 | {Credo.Check.Refactor.CondStatements, []}, 121 | {Credo.Check.Refactor.CyclomaticComplexity, []}, 122 | {Credo.Check.Refactor.FunctionArity, []}, 123 | {Credo.Check.Refactor.LongQuoteBlocks, []}, 124 | {Credo.Check.Refactor.MapInto, false}, 125 | {Credo.Check.Refactor.MatchInCondition, []}, 126 | {Credo.Check.Refactor.NegatedConditionsInUnless, []}, 127 | {Credo.Check.Refactor.NegatedConditionsWithElse, []}, 128 | {Credo.Check.Refactor.Nesting, []}, 129 | {Credo.Check.Refactor.UnlessWithElse, []}, 130 | {Credo.Check.Refactor.WithClauses, []}, 131 | 132 | # 133 | ## Warnings 134 | # 135 | {Credo.Check.Warning.BoolOperationOnSameValues, []}, 136 
| {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, 137 | {Credo.Check.Warning.IExPry, []}, 138 | {Credo.Check.Warning.IoInspect, []}, 139 | {Credo.Check.Warning.LazyLogging, false}, 140 | {Credo.Check.Warning.MixEnv, []}, 141 | {Credo.Check.Warning.OperationOnSameValues, []}, 142 | {Credo.Check.Warning.OperationWithConstantResult, []}, 143 | {Credo.Check.Warning.RaiseInsideRescue, []}, 144 | {Credo.Check.Warning.UnusedEnumOperation, []}, 145 | {Credo.Check.Warning.UnusedFileOperation, []}, 146 | {Credo.Check.Warning.UnusedKeywordOperation, []}, 147 | {Credo.Check.Warning.UnusedListOperation, []}, 148 | {Credo.Check.Warning.UnusedPathOperation, []}, 149 | {Credo.Check.Warning.UnusedRegexOperation, []}, 150 | {Credo.Check.Warning.UnusedStringOperation, []}, 151 | {Credo.Check.Warning.UnusedTupleOperation, []}, 152 | {Credo.Check.Warning.UnsafeExec, []}, 153 | 154 | # 155 | # Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`) 156 | 157 | # 158 | # Controversial and experimental checks (opt-in, just replace `false` with `[]`) 159 | # 160 | {Credo.Check.Readability.StrictModuleLayout, 161 | priority: :normal, order: ~w/shortdoc moduledoc behaviour use import require alias/a}, 162 | {Credo.Check.Consistency.MultiAliasImportRequireUse, false}, 163 | {Credo.Check.Consistency.UnusedVariableNames, force: :meaningful}, 164 | {Credo.Check.Design.DuplicatedCode, false}, 165 | {Credo.Check.Readability.AliasAs, false}, 166 | {Credo.Check.Readability.MultiAlias, false}, 167 | {Credo.Check.Readability.Specs, []}, 168 | {Credo.Check.Readability.SinglePipe, false}, 169 | {Credo.Check.Readability.WithCustomTaggedTuple, false}, 170 | {Credo.Check.Refactor.ABCSize, false}, 171 | {Credo.Check.Refactor.AppendSingleItem, false}, 172 | {Credo.Check.Refactor.DoubleBooleanNegation, false}, 173 | {Credo.Check.Refactor.ModuleDependencies, false}, 174 | {Credo.Check.Refactor.NegatedIsNil, false}, 175 | {Credo.Check.Refactor.PipeChainStart, false}, 176 | 
{Credo.Check.Refactor.VariableRebinding, false}, 177 | {Credo.Check.Warning.LeakyEnvironment, false}, 178 | {Credo.Check.Warning.MapGetUnsafePass, false}, 179 | {Credo.Check.Warning.UnsafeToAtom, false} 180 | 181 | # 182 | # Custom checks can be created using `mix credo.gen.check`. 183 | # 184 | ] 185 | } 186 | ] 187 | } 188 | -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test,examples}/**/*.{ex,exs}", "*.exs"] 4 | ] 5 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @shuntrho 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/please--open-new-issues-in-membranefranework-membrane_core.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Please, open new issues in membranefranework/membrane_core 3 | about: New issues related to this repo should be opened there 4 | title: "[DO NOT OPEN]" 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | Please, do not open this issue here. Open it in the [membrane_core](https://github.com/membraneframework/membrane_core) repository instead. 
11 | 12 | Thanks for helping us grow :) 13 | -------------------------------------------------------------------------------- /.github/workflows/on_issue_opened.yaml: -------------------------------------------------------------------------------- 1 | name: 'Close issue when opened' 2 | on: 3 | issues: 4 | types: 5 | - opened 6 | jobs: 7 | close: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout membrane_core 11 | uses: actions/checkout@v3 12 | with: 13 | repository: membraneframework/membrane_core 14 | - name: Close issue 15 | uses: ./.github/actions/close_issue 16 | with: 17 | GITHUB_TOKEN: ${{ secrets.MEMBRANEFRAMEWORKADMIN_TOKEN }} 18 | ISSUE_URL: ${{ github.event.issue.html_url }} 19 | ISSUE_NUMBER: ${{ github.event.issue.number }} 20 | REPOSITORY: ${{ github.repository }} 21 | -------------------------------------------------------------------------------- /.github/workflows/on_pr_opened.yaml: -------------------------------------------------------------------------------- 1 | name: Add PR to Smackore project board, if the author is from outside Membrane Team 2 | on: 3 | pull_request_target: 4 | types: 5 | - opened 6 | jobs: 7 | maybe_add_to_project_board: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout membrane_core 11 | uses: actions/checkout@v3 12 | with: 13 | repository: membraneframework/membrane_core 14 | - name: Puts PR in "New PRs by community" column in the Smackore project, if the author is from outside Membrane Team 15 | uses: ./.github/actions/add_pr_to_smackore_board 16 | with: 17 | GITHUB_TOKEN: ${{ secrets.MEMBRANEFRAMEWORKADMIN_TOKEN }} 18 | AUTHOR_LOGIN: ${{ github.event.pull_request.user.login }} 19 | PR_URL: ${{ github.event.pull_request.html_url }} 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | benchmark/ 2 | 3 | # The directory Mix will write compiled artifacts to. 
4 | /_build/ 5 | 6 | # If you run "mix test --cover", coverage assets end up here. 7 | /cover/ 8 | 9 | # The directory Mix downloads your dependencies sources to. 10 | /deps/ 11 | 12 | # Where third-party dependencies like ExDoc output generated docs. 13 | /doc/ 14 | 15 | # Ignore .fetch files in case you like to edit your project deps locally. 16 | /.fetch 17 | 18 | # If the VM crashes, it generates a dump, let's ignore it too. 19 | erl_crash.dump 20 | 21 | # Also ignore archive artifacts (built via "mix archive.build"). 22 | *.ez 23 | 24 | # Ignore package tarball (built via "mix hex.build"). 25 | beamchmark-*.tar 26 | 27 | # Temporary files, for example, from tests. 28 | /tmp/ 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 Software Mansion 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Beamchmark 2 | [![Hex.pm](https://img.shields.io/hexpm/v/beamchmark.svg)](https://hex.pm/packages/beamchmark) 3 | [![API Docs](https://img.shields.io/badge/api-docs-yellow.svg?style=flat)](https://hexdocs.pm/beamchmark) 4 | [![CircleCI](https://circleci.com/gh/membraneframework/beamchmark.svg?style=svg)](https://circleci.com/gh/membraneframework/beamchmark) 5 | 6 | Tool for measuring EVM performance. 7 | 8 | At the moment, the main interest of Beamchmark is scheduler utilization, reductions and the number of context switches. 9 | For more information please refer to API docs. 10 | Currently, Beamchmark is supported on macOS, Linux and partially on Windows. 11 | 12 | ## Beamchmark and Benchee 13 | Beamchmark should be used when you want to measure BEAM performance while it is running your application. 
14 | Benchee should be used when you want to benchmark specific function from your code base. 15 | In particular, Benchee will inform you how long your function is executing, while Beamchmark will inform you 16 | how busy BEAM is. 17 | 18 | ## Installation 19 | The package can be installed by adding `beamchmark` to your list of dependencies in `mix.exs`: 20 | 21 | ```elixir 22 | def deps do 23 | [ 24 | {:beamchmark, "~> 1.4.1"} 25 | ] 26 | end 27 | ``` 28 | 29 | ## Usage 30 | 31 | ### Running an application using `Beamchmark.Scenario` 32 | 33 | You create a test scenario by adopting `Beamchmark.Scenario` behaviour in a module. It has to implement `run()` function, which will execute for benchmarking. 34 | 35 | The examples of using `Scenario` are located in the `examples` directory. 36 | To run one of them, simply use the following command: 37 | 38 | ```bash 39 | mix run examples/.exs 40 | ``` 41 | 42 | ### Running Beamchmark in an attached mode 43 | 44 | If you want to measure the performance of an already running BEAM you can run Beamchmark in an attached mode. 45 | However, it is required that the node on which your application is running is a distributed node and has `Beamchmark` added to its dependencies. 46 | 47 | To run an example of Beamchmark in attached mode first start the node, which performance will be measured: 48 | ```bash 49 | cd examples/attached 50 | mix deps.get 51 | elixir --sname counter@localhost -S mix run start_counter.exs 52 | ``` 53 | The node will be visible under `counter@localhost` name. 54 | 55 | Now in another terminal you can start the benchmark: 56 | ```bash 57 | epmd -daemon 58 | mix run examples/attached/run_attached.exs 59 | ``` 60 | 61 | ## Formatters 62 | You can output benchmark results with Beamchmark's built-in formatters or implement a custom one. 63 | Formatters can also compare new results with the previous ones, given they share the same scenario module and 64 | were configured to run for the same amount of time. 
65 | 66 | Currently, you can output Beamchmark reports in the following ways: 67 | * `Beamchmark.Formatters.Console` 68 | 69 | This is the default formatter, it will print the report on standard output. 70 | 71 | ```txt 72 | ================ 73 | SYSTEM INFO 74 | ================ 75 | 76 | Elixir version: 1.13.4 77 | OTP version: 24 78 | OS: macOS 79 | Memory: 16 GB 80 | System arch: x86_64-apple-darwin21.3.0 81 | NIF version: 2.16 82 | Cores: 8 83 | 84 | ================ 85 | CONFIGURATION 86 | ================ 87 | 88 | Delay: 5s 89 | Duration: 15s 90 | 91 | ================ 92 | MEASUREMENTS 93 | ================ 94 | 95 | Normal schedulers 96 | -------------------- 97 | 1 0.7462382700312585 74.6% 98 | 2 0.7552131238891551 75.5% 99 | 3 0.7080346117265083 70.8% 100 | 4 0.6840002812013201 68.4% 101 | 5 0.7357487054135822 73.6% 102 | 6 0.7889711402496832 78.9% 103 | 7 0.7053186570052465 70.5% 104 | 8 0.495807853995791 49.6% 105 | Total: 0.7024165804390681 70.2% 106 | 107 | CPU schedulers 108 | -------------------- 109 | 9 0.39409732340500314 39.4% 110 | 10 0.5194739765841625 51.9% 111 | 11 0.45208160433332006 45.2% 112 | 12 0.33614215325750824 33.6% 113 | 13 0.05474778835410803 5.5% 114 | 14 0.31687236471324787 31.7% 115 | 15 0.06046101946449905 6.0% 116 | 16 0.0 0.0% 117 | Total: 0.2667345287639811 26.7% 118 | 119 | IO schedulers 120 | -------------------- 121 | 17 0.0 0.0% 122 | 18 0.0 0.0% 123 | 19 0.0 0.0% 124 | 20 0.0 0.0% 125 | 21 0.0 0.0% 126 | 22 0.0 0.0% 127 | 23 0.0 0.0% 128 | 24 0.0 0.0% 129 | 25 2.7705124922689514e-4 0.0% 130 | 26 0.0 0.0% 131 | Total: 2.7705124922689516e-5 0.0% 132 | 133 | Weighted 134 | -------------------- 135 | 0.9692071705951804 96.9% 136 | 137 | 138 | Reductions 139 | -------------------- 140 | 2847054520 141 | 142 | Context Switches 143 | -------------------- 144 | 717845 145 | 146 | CPU Usage Average 147 | -------------------- 148 | 51.2% 149 | 150 | CPU Usage Per Core 151 | -------------------- 152 | Core: 0 -> 99.81 % 153 | 
Core: 1 -> 2.84 % 154 | Core: 2 -> 99.81 % 155 | Core: 3 -> 2.57 % 156 | Core: 4 -> 99.82 % 157 | Core: 5 -> 2.23 % 158 | Core: 6 -> 99.88 % 159 | Core: 7 -> 2.6 % 160 | 161 | Memory usage 162 | -------------------- 163 | 3.56 GB 164 | 165 | ================ 166 | NEW MEASUREMENTS 167 | ================ 168 | 169 | Normal schedulers 170 | -------------------- 171 | 1 0.7391849466705548 73.9% -0.007053323360703745 -0.9383378016085686% 172 | 2 0.6451374210660318 64.5% -0.11007570282312329 -14.569536423841058% 173 | 3 0.612116497924041 61.2% -0.09591811380246729 -13.559322033898297% 174 | 4 0.7119528248221814 71.2% 0.027952543620861303 4.093567251461991% 175 | 5 0.7175675964576803 71.8% -0.01818110895590186 -2.4456521739130324% 176 | 6 0.667647106911744 66.8% -0.12132403333793917 -15.335868187579223% 177 | 7 0.7588791891435591 75.9% 0.05356053213831258 7.659574468085111% 178 | 8 0.7007975884343178 70.1% 0.2049897344385268 41.330645161290306% 179 | Total: 0.6941603964287638 69.4% -0.008256184010304257 -1.139601139601126% 180 | 181 | CPU schedulers 182 | -------------------- 183 | 9 0.40317586539492 40.3% 0.009078541989916866 2.284263959390856% 184 | 10 0.0658197960010861 6.6% -0.4536541805830764 -87.28323699421965% 185 | 11 0.207488920931131 20.7% -0.24459268340218907 -54.20353982300885% 186 | 12 0.4070941615062336 40.7% 0.07095200824872538 21.130952380952394% 187 | 13 0.5912324517586194 59.1% 0.5364846634045114 974.5454545454545% 188 | 14 4.213003273973723e-8 0.0% -0.3168723225832151 -100% 189 | 15 0.5185116282961778 51.9% 0.45805060883167875 765.0% 190 | 16 0.014049861167737257 1.4% 0.014049861167737257 nan 191 | Total: 0.27592159089824225 27.6% 0.009187062134261126 3.37078651685394% 192 | 193 | IO schedulers 194 | -------------------- 195 | 17 0.0 0.0% 0.0 0% 196 | 18 0.0 0.0% 0.0 0% 197 | 19 0.0 0.0% 0.0 0% 198 | 20 0.0 0.0% 0.0 0% 199 | 21 0.0 0.0% 0.0 0% 200 | 22 0.0 0.0% 0.0 0% 201 | 23 0.0 0.0% 0.0 0% 202 | 24 0.0 0.0% 0.0 0% 203 | 25 0.0 0.0% 
-2.7705124922689514e-4 0% 204 | 26 2.2108785953999204e-4 0.0% 2.2108785953999204e-4 0% 205 | Total: 2.2108785953999205e-5 0.0% -5.596338968690311e-6 0% 206 | 207 | Weighted 208 | -------------------- 209 | 0.9700717546422247 97.0% 8.6458404704437e-4 0.10319917440659765% 210 | 211 | 212 | Reductions 213 | -------------------- 214 | 2621243405 -225811115 -7.931394127289138% 215 | 216 | Context Switches 217 | -------------------- 218 | 666449 -51396 -7.159762901462017% 219 | 220 | CPU Usage Average 221 | -------------------- 222 | 51.88% 0.68% 1.34% 223 | 224 | CPU Usage Per Core 225 | -------------------- 226 | Core 0 -> 99.74% -0.07 -0.07 % 227 | Core 1 -> 4.35% 1.51 53.17 % 228 | Core 2 -> 99.83% 0.01 0.01 % 229 | Core 3 -> 3.96% 1.39 53.96 % 230 | Core 4 -> 99.82% 0.01 0.01 % 231 | Core 5 -> 3.75% 1.52 68.44 % 232 | Core 6 -> 99.76% -0.12 -0.12 % 233 | Core 7 -> 3.83% 1.22 46.91 % 234 | 235 | Memory usage 236 | -------------------- 237 | 3.58 GB 21.96 MB 0.6% 238 | ``` 239 | 240 | * `Beamchmark.Formatters.HTML` 241 | 242 | The HTML formatter will save the report to an HTML file. 243 | 244 | ![Screenshot of an HTML report](https://user-images.githubusercontent.com/48837433/172619856-44e1280d-b361-4fb9-941a-11c83bde6e47.png) 245 | 246 | 247 | * Custom formatters 248 | 249 | You can implement your custom formatters by overriding `Beamchmark.Formatter` behaviour. 
250 | 251 | ## Copyright and License 252 | Copyright 2021, [Software Mansion](https://swmansion.com/?utm_source=git&utm_medium=readme&utm_campaign=beamchmark) 253 | 254 | [![Software Mansion](https://logo.swmansion.com/logo?color=white&variant=desktop&width=200&tag=membrane-github)](https://swmansion.com/?utm_source=git&utm_medium=readme&utm_campaign=beamchmark) 255 | 256 | Licensed under the [Apache License, Version 2.0](LICENSE) 257 | -------------------------------------------------------------------------------- /examples/advanced.exs: -------------------------------------------------------------------------------- 1 | defmodule AdvancedScenario do 2 | @moduledoc false 3 | 4 | @behaviour Beamchmark.Scenario 5 | 6 | @out_dir __MODULE__ |> Atom.to_string() |> String.trim_leading("Elixir.") 7 | @num_schedulers System.schedulers_online() 8 | @functions [ 9 | &:math.sqrt/1, 10 | &:math.sin/1, 11 | &:math.cos/1, 12 | &:math.tan/1, 13 | &:math.log/1, 14 | &:math.log2/1, 15 | &:math.log10/1, 16 | &:math.erf/1 17 | ] 18 | 19 | @impl true 20 | def run() do 21 | File.mkdir_p!(@out_dir) 22 | 23 | @functions 24 | |> Stream.cycle() 25 | |> Stream.take(@num_schedulers) 26 | |> Task.async_stream( 27 | fn function -> 28 | {:name, name} = Function.info(function, :name) 29 | filename = Atom.to_string(name) <> ".txt" 30 | out_path = Path.join([@out_dir, filename]) 31 | 32 | 1..10_000_000 33 | |> Enum.map_join("\n", &function.(&1)) 34 | |> then(&File.write!(out_path, &1)) 35 | end, 36 | ordered: false, 37 | timeout: :infinity 38 | ) 39 | |> Stream.run() 40 | end 41 | end 42 | 43 | Beamchmark.run(AdvancedScenario, 44 | name: "AdvancedScenario performance - otp 23 vs otp 24", 45 | duration: 15, 46 | delay: 5, 47 | cpu_interval: 500, 48 | memory_interval: 1000, 49 | compare?: true, 50 | output_dir: "beamchmark_output", 51 | formatters: [ 52 | Beamchmark.Formatters.Console, 53 | {Beamchmark.Formatters.HTML, 54 | [output_path: "reports/beamchmark.html", auto_open?: true, inline_assets?: 
false]} 55 | ] 56 | ) 57 | -------------------------------------------------------------------------------- /examples/attached/.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] 4 | ] 5 | -------------------------------------------------------------------------------- /examples/attached/.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | attached-*.tar 24 | 25 | # Temporary files, for example, from tests. 
26 | /tmp/ 27 | -------------------------------------------------------------------------------- /examples/attached/lib/attached.ex: -------------------------------------------------------------------------------- 1 | defmodule Attached do 2 | @moduledoc false 3 | @startvalue 10000 4 | 5 | def count(number \\ @startvalue) 6 | 7 | def count(0) do 8 | count(@startvalue) 9 | end 10 | 11 | def count(number) do 12 | Integer.pow(number, number) 13 | count(number - 1) 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /examples/attached/mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Attached.MixProject do 2 | use Mix.Project 3 | 4 | def project do 5 | [ 6 | app: :attached, 7 | version: "0.1.0", 8 | elixir: "~> 1.13", 9 | start_permanent: Mix.env() == :prod, 10 | deps: deps() 11 | ] 12 | end 13 | 14 | # Run "mix help compile.app" to learn about applications. 15 | def application do 16 | [ 17 | extra_applications: [:logger] 18 | ] 19 | end 20 | 21 | # Run "mix help deps" to learn about dependencies. 
22 | defp deps do 23 | [ 24 | {:beamchmark, git: "https://github.com/membraneframework/beamchmark.git"} 25 | ] 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /examples/attached/mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "beamchmark": {:git, "https://github.com/membraneframework/beamchmark.git", "0e96aff864645d2c4c74bb0967291f74765b0337", []}, 3 | "bunch": {:hex, :bunch, "1.3.1", "f8fe80042f9eb474ef2801ae2c9372f9b13d11e7053265dcfc24b9d912e3750b", [:mix], [], "hexpm", "00e21b16ff9bb698b728a01a2fc4b3bf7fc0e87c4bb9c6e4a442324aa8c5e567"}, 4 | "math": {:hex, :math, "0.7.0", "12af548c3892abf939a2e242216c3e7cbfb65b9b2fe0d872d05c6fb609f8127b", [:mix], [], "hexpm", "7987af97a0c6b58ad9db43eb5252a49fc1dfe1f6d98f17da9282e297f594ebc2"}, 5 | } 6 | -------------------------------------------------------------------------------- /examples/attached/run_attached.exs: -------------------------------------------------------------------------------- 1 | Beamchmark.run_attached(:counter@localhost, duration: 10) 2 | -------------------------------------------------------------------------------- /examples/attached/start_counter.exs: -------------------------------------------------------------------------------- 1 | Attached.count() 2 | -------------------------------------------------------------------------------- /examples/simple.exs: -------------------------------------------------------------------------------- 1 | defmodule SimpleScenario do 2 | @moduledoc false 3 | 4 | @behaviour Beamchmark.Scenario 5 | 6 | @impl true 7 | def run() do 8 | 1..1_000 9 | |> Stream.cycle() 10 | |> Stream.each(fn i -> Integer.pow(i, 2) end) 11 | # Streams are lazy: without forcing evaluation the pipeline above never executes, 12 | # so the VM would be benchmarked while idle. `Stream.cycle/1` is infinite, which is 13 | # intended — the scenario must outlive the measurement window (see Beamchmark.Scenario docs). 14 | |> Stream.run() 15 | end 16 | end 17 | 18 | Beamchmark.run(SimpleScenario, duration: 5, delay: 1) 19 | -------------------------------------------------------------------------------- /lib/beamchmark.ex:
-------------------------------------------------------------------------------- 1 | defmodule Beamchmark do 2 | @moduledoc """ 3 | Top level module providing `Beamchmark.run/2` and `Beamchmark.run_attached/2` API. 4 | 5 | `#{inspect(__MODULE__)}` measures EVM performance while it is running user `#{inspect(__MODULE__)}.Scenario`. 6 | 7 | # Metrics being measured 8 | 9 | ## Scheduler Utilization 10 | 11 | At the moment, the main interest of `#{inspect(__MODULE__)}` is scheduler utilization which tells 12 | how much given scheduler was busy. 13 | Scheduler is busy when: 14 | * Executing process code 15 | * Executing linked-in driver or NIF code 16 | * Executing BIFs, or any other runtime handling 17 | * Garbage collecting 18 | * Handling any other memory management 19 | 20 | Scheduler utilization is measured using Erlang's [`:scheduler`](`:scheduler`) module which uses `:erlang.statistics/1` 21 | under the hood and it is represented as a floating point value between 0.0 and 1.0 and percent. 22 | 23 | `#{inspect(__MODULE__)}` measures following types of scheduler utilization: 24 | * normal/cpu/io - average utilization of single scheduler of given type 25 | * total normal/cpu/io - average utilization of all schedulers of given type. E.g total normal equals 1.0 when 26 | each of normal schedulers have been active all the time 27 | * total - average utilization of all schedulers 28 | * weighted - average utilization of all schedulers weighted against maximum amount of available CPU time 29 | 30 | For more information please refer to `:erlang.statistics/1` (under `:scheduler_wall_time`) or `:scheduler.utilization/1`. 
31 | 32 | ## Other 33 | 34 | Other metrics being measured: 35 | * reductions - total reductions number 36 | * context switches - total context switches number 37 | """ 38 | 39 | alias Beamchmark.Scenario.EmptyScenario 40 | alias Beamchmark.Suite.Configuration 41 | 42 | @default_configuration %Beamchmark.Suite.Configuration{ 43 | duration: 60, 44 | cpu_interval: 1000, 45 | memory_interval: 1000, 46 | delay: 0, 47 | formatters: [Beamchmark.Formatters.Console], 48 | output_dir: Path.join([System.tmp_dir!(), "beamchmark"]), 49 | compare?: true, 50 | attached?: false, 51 | metadata: %{} 52 | } 53 | 54 | @typedoc """ 55 | Configuration for `#{inspect(__MODULE__)}`. 56 | * `name` - name of the benchmark. It can be used by formatters. 57 | * `duration` - time in seconds `#{inspect(__MODULE__)}` will be benchmarking EVM. Defaults to `#{@default_configuration.duration}` seconds. 58 | * `cpu_interval` - time in milliseconds `#{inspect(__MODULE__)}` will be benchmarking cpu usage. Defaults to `#{@default_configuration.cpu_interval}` milliseconds. Needs to be greater than or equal to `interfere_timeout`. 59 | * `memory_interval` - time in milliseconds `#{inspect(__MODULE__)}` will be benchmarking memory usage. Defaults to `#{@default_configuration.memory_interval}` milliseconds. Needs to be greater than or equal to `interfere_timeout`. 60 | * `delay` - time in seconds `#{inspect(__MODULE__)}` will wait after running scenario and before starting benchmarking. Defaults to `#{@default_configuration.delay}` seconds. 61 | * `formatters` - list of formatters that will be applied to the result. By default contains only `#{inspect(@default_configuration.formatters)}`. 62 | * `compare?` - boolean indicating whether formatters should compare results for given scenario with the previous one. Defaults to `#{inspect(@default_configuration.compare?)}.` 63 | * `output_dir` - directory where results of benchmarking will be saved. 
Defaults to "`beamchmark`" directory under location provided by `System.tmp_dir!/0`. 64 | """ 65 | @type options_t() :: [ 66 | name: String.t(), 67 | duration: pos_integer(), 68 | cpu_interval: pos_integer(), 69 | memory_interval: pos_integer(), 70 | delay: non_neg_integer(), 71 | formatters: [Beamchmark.Formatter.t()], 72 | compare?: boolean(), 73 | output_dir: Path.t() 74 | ] 75 | 76 | @doc """ 77 | Runs scenario and benchmarks EVM performance. 78 | 79 | If `compare?` option equals `true`, invocation of this function will also compare new measurements with the last ones. 80 | Measurements will be compared only if they share the same scenario module, delay and duration. 81 | """ 82 | @spec run(Beamchmark.Scenario.t(), options_t()) :: :ok 83 | def run(scenario, opts \\ []) do 84 | config = Configuration.get_configuration(opts, @default_configuration) 85 | 86 | scenario 87 | |> Beamchmark.Suite.init(config) 88 | |> Beamchmark.Suite.run() 89 | |> tap(fn suite -> :ok = Beamchmark.Suite.save(suite) end) 90 | |> tap(fn suite -> :ok = Beamchmark.Formatter.output(suite) end) 91 | 92 | :ok 93 | end 94 | 95 | @doc """ 96 | Executes `Beamchmark.run/2` on a given node. 97 | 98 | This function can be used to measure performance of an already running node. 99 | The node which we are connecting to has to be a distributed node. 100 | """ 101 | @spec run_attached(node(), options_t()) :: :ok 102 | def run_attached(node_name, opts \\ []) do 103 | Node.start(:beamchmark@localhost, :shortnames) 104 | 105 | unless Node.connect(node_name) == true do 106 | raise "Failed to connect to #{node_name} or the node is not alive." 
107 | end 108 | 109 | pid = Node.spawn(node_name, __MODULE__, :run, [EmptyScenario, opts ++ [attached?: true]]) 110 | ref = Process.monitor(pid) 111 | 112 | receive do 113 | {:DOWN, ^ref, _process, _object, _reason} -> 114 | :ok 115 | end 116 | end 117 | end 118 | -------------------------------------------------------------------------------- /lib/beamchmark/formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatter do 2 | @moduledoc """ 3 | The module defines a behaviour that will be used to format and output `#{inspect(Beamchmark.Suite)}`. 4 | You can adopt this behaviour to implement custom formatters. 5 | 6 | The module contains helper functions for validating and applying formatters defined in configuration 7 | of `#{inspect(Beamchmark.Suite)}`. 8 | """ 9 | 10 | alias Beamchmark.Suite 11 | 12 | @typedoc """ 13 | Represents a module implementing `#{inspect(__MODULE__)}` behaviour. 14 | """ 15 | @type t :: module() 16 | 17 | @typedoc """ 18 | Options given to formatters (defined by formatters authors). 19 | """ 20 | @type options_t :: Keyword.t() 21 | 22 | @doc """ 23 | Takes the suite and transforms it into some internal representation, that later on will be passed to 24 | `write/2`. 25 | """ 26 | @callback format(Suite.t(), options_t) :: any() 27 | 28 | @doc """ 29 | Works like `format/2`, but can provide additional information by comparing the latest suite with the 30 | previous one (passed as the second argument). 31 | """ 32 | @callback format(Suite.t(), Suite.t(), options_t) :: any() 33 | 34 | @doc """ 35 | Takes the return value of `format/1` or `format/2` and outputs it in a convenient form (stdout, file, UI...). 36 | """ 37 | @callback write(any, options_t) :: :ok 38 | 39 | @doc """ 40 | Takes the suite and uses its formatters to output it. If the suite was configured with `compare?` flag enabled, 41 | the previous suite will be also provided to the formatters. 
42 | """ 43 | @spec output(Suite.t()) :: :ok 44 | def output(%Suite{} = suite) do 45 | with true <- suite.configuration.compare?, 46 | {:ok, base_suite} <- Suite.try_load_base(suite) do 47 | output_compare(suite, base_suite) 48 | else 49 | false -> 50 | output_single(suite) 51 | 52 | {:error, posix} -> 53 | Mix.shell().info(""" 54 | Comparison is enabled, but did not found any previous measurements (error: #{inspect(posix)}). 55 | Proceeding with single suite... 56 | """) 57 | 58 | output_single(suite) 59 | end 60 | end 61 | 62 | defp output_single(%Suite{} = suite) do 63 | suite 64 | |> get_formatters() 65 | |> Enum.each(fn {formatter, options} -> 66 | :ok = 67 | suite 68 | |> formatter.format(options) 69 | |> formatter.write(options) 70 | end) 71 | end 72 | 73 | defp output_compare(%Suite{} = suite, %Suite{} = base) do 74 | suite 75 | |> get_formatters() 76 | |> Enum.each(fn {formatter, options} -> 77 | :ok = 78 | suite 79 | |> formatter.format(base, options) 80 | |> formatter.write(options) 81 | end) 82 | end 83 | 84 | defp get_formatters(%Suite{configuration: config}) do 85 | config.formatters 86 | |> Enum.map(fn formatter -> 87 | case formatter do 88 | {module, options} -> {module, options} 89 | module -> {module, []} 90 | end 91 | end) 92 | |> tap(fn formatters -> Enum.each(formatters, &validate/1) end) 93 | end 94 | 95 | defp validate({formatter, options}) do 96 | unless Keyword.keyword?(options) do 97 | raise( 98 | "Options for #{inspect(formatter)} need to be passed as a keyword list. Got: #{inspect(options)}." 99 | ) 100 | end 101 | 102 | implements_formatter? = 103 | formatter.module_info(:attributes) 104 | |> Keyword.get(:behaviour, []) 105 | |> Enum.member?(__MODULE__) 106 | 107 | unless implements_formatter? do 108 | raise "#{inspect(formatter)} does not implement #{inspect(__MODULE__)} behaviour." 
109 | end 110 | end 111 | end 112 | -------------------------------------------------------------------------------- /lib/beamchmark/formatters/console.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.Console do 2 | @moduledoc """ 3 | The module formats `#{inspect(Beamchmark.Suite)}` and outputs it using `Mix.shell/0`. 4 | """ 5 | 6 | @behaviour Beamchmark.Formatter 7 | 8 | alias Beamchmark.Formatters.Utils 9 | alias Beamchmark.Suite.{Configuration, Measurements, SystemInfo} 10 | alias Beamchmark.{Math, Suite} 11 | 12 | @precision 2 13 | 14 | @impl true 15 | def format(%Suite{} = suite, _options) do 16 | benchmark_name = suite.configuration.name 17 | system_info = format_system_info(suite.system_info) 18 | configuration = format_configuration(suite.configuration) 19 | measurements = format_measurements(suite.measurements) 20 | 21 | [benchmark_name, system_info, configuration, measurements] 22 | |> Enum.join("\n") 23 | end 24 | 25 | @impl true 26 | def format(%Suite{} = new_suite, %Suite{} = base_suite, _options) do 27 | benchmark_name = new_suite.configuration.name 28 | system_info = format_system_info(new_suite.system_info) 29 | configuration = format_configuration(new_suite.configuration) 30 | base_measurements = format_measurements(base_suite.measurements) 31 | diff_measurements = Measurements.diff(base_suite.measurements, new_suite.measurements) 32 | new_measurements = format_measurements(new_suite.measurements, diff_measurements) 33 | 34 | [benchmark_name, system_info, configuration, base_measurements, new_measurements] 35 | |> Enum.join("\n") 36 | end 37 | 38 | @impl true 39 | def write(data, _options) do 40 | Mix.shell().info(data) 41 | end 42 | 43 | defp format_system_info(%SystemInfo{} = system_info) do 44 | """ 45 | #{section_header("System info")} 46 | 47 | Elixir version: #{system_info.elixir_version} 48 | OTP version: #{system_info.otp_version} 49 | OS: #{system_info.os} 50 | 
Memory: #{Utils.format_memory(system_info.mem)} 51 | System arch: #{system_info.arch} 52 | NIF version: #{system_info.nif_version} 53 | Cores: #{system_info.num_cores} 54 | """ 55 | end 56 | 57 | defp format_configuration(%Configuration{} = configuration) do 58 | """ 59 | #{section_header("Configuration")} 60 | 61 | Delay: #{inspect(configuration.delay)}s 62 | Duration: #{inspect(configuration.duration)}s 63 | """ 64 | end 65 | 66 | defp format_measurements(%Measurements{} = measurements) do 67 | """ 68 | #{section_header("Measurements")} 69 | 70 | #{format_scheduler_info(measurements.scheduler_info)} 71 | 72 | #{entry_header("Reductions")} 73 | #{format_numbers(measurements.reductions)} 74 | 75 | #{entry_header("Context Switches")} 76 | #{format_numbers(measurements.context_switches)} 77 | 78 | #{entry_header("CPU Usage Average")} 79 | #{format_numbers(measurements.cpu_info.average_all)}% 80 | 81 | #{entry_header("CPU Usage Per Core")} 82 | #{format_cpu_by_core(measurements.cpu_info.average_by_core)} 83 | 84 | #{entry_header("Memory usage")} 85 | #{Utils.format_memory(measurements.memory_info.average.total, 2)} 86 | """ 87 | end 88 | 89 | defp format_measurements(%Measurements{} = measurements, %Measurements{} = measurements_diff) do 90 | """ 91 | #{section_header("New measurements")} 92 | 93 | #{format_scheduler_info(measurements.scheduler_info, measurements_diff.scheduler_info)} 94 | 95 | #{entry_header("Reductions")} 96 | #{format_numbers(measurements.reductions, measurements_diff.reductions)} 97 | 98 | #{entry_header("Context Switches")} 99 | #{format_numbers(measurements.context_switches, 100 | measurements_diff.context_switches)} 101 | 102 | #{entry_header("CPU Usage Average")} 103 | #{format_cpu_average(measurements.cpu_info.average_all, 104 | measurements_diff.cpu_info.average_all)} 105 | 106 | #{entry_header("CPU Usage Per Core")} 107 | #{format_cpu_by_core(measurements.cpu_info.average_by_core, 108 | measurements_diff.cpu_info.average_by_core)} 109 | 110 
| #{entry_header("Memory usage")} 111 | #{format_memory_average(measurements.memory_info.average.total, measurements_diff.memory_info.average.total)} 112 | """ 113 | end 114 | 115 | defp format_cpu_average(cpu_average, cpu_average_diff) do 116 | cpu_old = cpu_average - cpu_average_diff 117 | 118 | cpu_diff_percent = Math.percent_diff(cpu_old, cpu_average) 119 | color = get_color(cpu_average_diff) 120 | 121 | "#{format_numbers(cpu_average)}% #{color} #{format_numbers(cpu_average_diff)}% #{format_numbers(cpu_diff_percent)}#{if cpu_diff_percent != :nan, do: "%"}#{IO.ANSI.reset()}" 122 | end 123 | 124 | defp format_memory_average(memory_average, memory_average_diff) do 125 | memory_old = memory_average - memory_average_diff 126 | 127 | memory_diff_percent = Math.percent_diff(memory_old, memory_average) 128 | color = get_color(memory_average_diff) 129 | 130 | "#{Utils.format_memory(memory_average, 2)} #{color} #{Utils.format_memory(memory_average_diff, 2)} #{format_numbers(memory_diff_percent)}#{if memory_diff_percent != :nan, do: "%"}#{IO.ANSI.reset()}" 131 | end 132 | 133 | defp format_scheduler_info(%Measurements.SchedulerInfo{} = scheduler_info) do 134 | """ 135 | #{entry_header("Normal schedulers")} 136 | #{format_scheduler_entry(scheduler_info.normal)} 137 | Total: #{format_scheduler_entry(scheduler_info.total_normal)} 138 | 139 | #{entry_header("CPU schedulers")} 140 | #{format_scheduler_entry(scheduler_info.cpu)} 141 | Total: #{format_scheduler_entry(scheduler_info.total_cpu)} 142 | 143 | #{entry_header("IO schedulers")} 144 | #{format_scheduler_entry(scheduler_info.io)} 145 | Total: #{format_scheduler_entry(scheduler_info.total_io)} 146 | 147 | #{entry_header("Weighted")} 148 | #{format_scheduler_entry(scheduler_info.weighted)} 149 | """ 150 | end 151 | 152 | defp format_scheduler_info( 153 | %Measurements.SchedulerInfo{} = scheduler_info, 154 | %Measurements.SchedulerInfo{} = scheduler_info_diff 155 | ) do 156 | """ 157 | #{entry_header("Normal schedulers")} 
158 | #{format_scheduler_entry(scheduler_info.normal, scheduler_info_diff.normal)} 159 | Total: #{format_scheduler_entry(scheduler_info.total_normal, scheduler_info_diff.total_normal)} 160 | 161 | #{entry_header("CPU schedulers")} 162 | #{format_scheduler_entry(scheduler_info.cpu, scheduler_info_diff.cpu)} 163 | Total: #{format_scheduler_entry(scheduler_info.total_cpu, scheduler_info_diff.total_cpu)} 164 | 165 | #{entry_header("IO schedulers")} 166 | #{format_scheduler_entry(scheduler_info.io, scheduler_info_diff.io)} 167 | Total: #{format_scheduler_entry(scheduler_info.total_io, scheduler_info_diff.total_io)} 168 | 169 | #{entry_header("Weighted")} 170 | #{format_scheduler_entry(scheduler_info.weighted, scheduler_info_diff.weighted)} 171 | """ 172 | end 173 | 174 | defp format_scheduler_entry(sched_usage) when is_map(sched_usage) do 175 | Enum.map_join(sched_usage, "\n", fn {sched_id, {util, percent}} -> 176 | "#{sched_id} #{util} #{percent}%" 177 | end) 178 | end 179 | 180 | # clauses for total and weighted usage 181 | defp format_scheduler_entry({util, percent}) do 182 | "#{util} #{percent}%" 183 | end 184 | 185 | defp format_cpu_by_core(cpu_by_core) do 186 | Enum.map_join(cpu_by_core, "\n", fn {core_id, usage} -> 187 | "Core: #{core_id} -> #{format_numbers(usage)} %" 188 | end) 189 | end 190 | 191 | defp format_cpu_by_core(cpu_by_core, cpu_by_core_diff) do 192 | Enum.map_join(cpu_by_core, "\n", fn {core_id, usage} -> 193 | usage_diff = Map.get(cpu_by_core_diff, core_id) 194 | usage_old = usage - usage_diff 195 | usage_diff_percent = Math.percent_diff(usage_old, usage) 196 | color = get_color(usage_diff) 197 | 198 | "Core #{core_id} -> #{format_numbers(usage)}% #{color} #{format_numbers(usage_diff)} #{format_numbers(usage_diff_percent)} #{if usage_diff_percent != :nan, do: "%"}#{IO.ANSI.reset()}" 199 | end) 200 | end 201 | 202 | defp format_scheduler_entry(sched_usage, sched_usage_diff) 203 | when is_map(sched_usage) and is_map(sched_usage_diff) do 204 | 
Enum.map_join(sched_usage, "\n", fn {sched_id, {util, percent}} -> 205 | {util_diff, percent_diff} = Map.get(sched_usage_diff, sched_id) 206 | color = get_color(percent_diff) 207 | 208 | "#{sched_id} #{util} #{percent}% #{color} #{util_diff} #{percent_diff}#{if percent_diff != :nan, do: "%"}#{IO.ANSI.reset()}" 209 | end) 210 | end 211 | 212 | defp format_scheduler_entry({util, percent}, {util_diff, percent_diff}) do 213 | color = get_color(util_diff) 214 | 215 | "#{util} #{percent}% #{color} #{util_diff} #{percent_diff}#{if percent_diff != :nan, do: "%"}#{IO.ANSI.reset()}" 216 | end 217 | 218 | defp format_numbers(number) when is_integer(number) or number == :nan, do: "#{number}" 219 | 220 | defp format_numbers(float_value) when is_float(float_value) do 221 | Float.round(float_value, @precision) 222 | end 223 | 224 | defp format_numbers(number, number_diff) when is_integer(number) and is_integer(number_diff) do 225 | color = get_color(number_diff) 226 | # old number = number - number_diff 227 | percent_diff = Math.percent_diff(number - number_diff, number) 228 | 229 | "#{number} #{color} #{number_diff} #{percent_diff}#{if percent_diff != :nan, do: "%"}#{IO.ANSI.reset()}" 230 | end 231 | 232 | defp section_header(text) do 233 | """ 234 | ================ 235 | #{String.upcase(text)} 236 | ================ 237 | """ 238 | |> String.trim() 239 | end 240 | 241 | defp entry_header(text) do 242 | """ 243 | #{text} 244 | -------------------- 245 | """ 246 | |> String.trim() 247 | end 248 | 249 | defp get_color(diff) do 250 | cond do 251 | diff < 0 -> IO.ANSI.green() 252 | diff == 0 -> IO.ANSI.white() 253 | diff > 0 -> IO.ANSI.red() 254 | end 255 | end 256 | end 257 | -------------------------------------------------------------------------------- /lib/beamchmark/formatters/html.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.HTML do 2 | @moduledoc """ 3 | The module formats 
`#{inspect(Beamchmark.Suite)}` and outputs it to an HTML file. 4 | """ 5 | 6 | @behaviour Beamchmark.Formatter 7 | 8 | alias __MODULE__.Templates 9 | alias Beamchmark.Suite 10 | alias Beamchmark.Utils 11 | 12 | @default_output_path "index.html" 13 | @default_auto_open true 14 | @default_inline_assets false 15 | 16 | @typedoc """ 17 | Configuration for `#{inspect(__MODULE__)}`. 18 | * `output_path` – path to the file, where the report will be saved. Defaults to `#{inspect(@default_output_path)}`. 19 | * `auto_open?` – if `true`, opens the report in system's default browser. Defaults to `#{inspect(@default_auto_open)}`. 20 | * `inline_assets?` – if `true`, pastes contents of `.css` and `.js` assets directly into HTML. Defaults to `#{inspect(@default_inline_assets)}`. 21 | """ 22 | @type options_t() :: [ 23 | output_path: Path.t(), 24 | auto_open?: boolean(), 25 | inline_assets?: boolean() 26 | ] 27 | 28 | @impl true 29 | def format(%Suite{} = suite, options) do 30 | Templates.index(suite, nil, Keyword.get(options, :inline_assets?, @default_inline_assets)) 31 | end 32 | 33 | @impl true 34 | def format(%Suite{} = new_suite, %Suite{} = base_suite, options) do 35 | Templates.index( 36 | new_suite, 37 | base_suite, 38 | Keyword.get(options, :inline_assets?, @default_inline_assets) 39 | ) 40 | end 41 | 42 | @impl true 43 | def write(content, options) do 44 | output_path = 45 | options |> Keyword.get(:output_path, @default_output_path) |> Path.expand() |> format_path() 46 | 47 | auto_open? = Keyword.get(options, :auto_open?, @default_auto_open) 48 | 49 | dirname = Path.dirname(output_path) 50 | 51 | unless File.exists?(dirname) do 52 | File.mkdir_p!(dirname) 53 | end 54 | 55 | File.write!(output_path, content) 56 | Mix.shell().info("The HTML file was successfully saved under #{output_path}!") 57 | 58 | maybe_open_report(output_path, auto_open?) 
59 | end 60 | 61 | defp maybe_open_report(_path_to_html, false), do: :ok 62 | 63 | defp maybe_open_report(path_to_html, true) do 64 | browser = get_browser() 65 | {_, exit_code} = System.cmd(browser, [path_to_html]) 66 | 67 | if exit_code > 0 do 68 | Mix.shell().error("Failed to open report using \"#{browser}\".") 69 | else 70 | Mix.shell().info("Opened report using \"#{browser}\".") 71 | end 72 | end 73 | 74 | defp get_browser() do 75 | case Utils.get_os_name() do 76 | :macOS -> "open" 77 | :Windows -> "explorer" 78 | :Linux -> "xdg-open" 79 | os_name -> raise RuntimeError, message: "Beamchmark not supported for #{os_name}" 80 | end 81 | end 82 | 83 | defp format_path(path) do 84 | case Utils.get_os_name() do 85 | :Windows -> String.replace(path, "/", "\\") 86 | _os -> path 87 | end 88 | end 89 | end 90 | -------------------------------------------------------------------------------- /lib/beamchmark/formatters/html/templates.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.HTML.Templates do 2 | @moduledoc false 3 | 4 | require EEx 5 | 6 | alias Beamchmark.Formatters.Utils 7 | alias Beamchmark.Scenario 8 | alias Beamchmark.Suite.Measurements.{CpuInfo, MemoryInfo, SchedulerInfo} 9 | 10 | EEx.function_from_file(:def, :index, "priv/templates/index.html.eex", [ 11 | :new_suite, 12 | :base_suite, 13 | :inline_assets? 
14 | ]) 15 | 16 | EEx.function_from_file(:def, :configuration, "priv/templates/configuration.html.eex", [ 17 | :configuration 18 | ]) 19 | 20 | EEx.function_from_file(:def, :system, "priv/templates/system.html.eex", [:system_info]) 21 | 22 | EEx.function_from_file(:def, :measurements, "priv/templates/measurements.html.eex", [ 23 | :new_measurements, 24 | :base_measurements 25 | ]) 26 | 27 | @spec format_scenario(Scenario.t()) :: String.t() 28 | def format_scenario(scenario) do 29 | scenario |> Atom.to_string() |> String.trim_leading("Elixir.") 30 | end 31 | 32 | @spec format_scheduler_info(SchedulerInfo.sched_usage_t()) :: %{ 33 | (scheduler_usage_entry :: atom()) => String.t() 34 | } 35 | def format_scheduler_info(scheduler_usage_info) do 36 | sorted_by_ids = 37 | Enum.sort_by(scheduler_usage_info, fn {scheduler_id, _scheduler_usage} -> scheduler_id end) 38 | 39 | %{ 40 | scheduler_ids: 41 | Enum.map_join(sorted_by_ids, ", ", fn {scheduler_id, _usage} -> scheduler_id end), 42 | usage: 43 | Enum.map_join(sorted_by_ids, ", ", fn {_scheduler_id, {usage, _percent_usage}} -> 44 | usage 45 | end), 46 | percent_usage: 47 | Enum.map_join(sorted_by_ids, ", ", fn {_scheduler_id, {_usage, percent_usage}} -> 48 | "\"#{percent_usage}%\"" 49 | end) 50 | } 51 | end 52 | 53 | defp format_float(float) do 54 | Float.round(float, 2) 55 | end 56 | 57 | @spec format_average_cpu_usage([CpuInfo.cpu_snapshot_t()]) :: %{ 58 | (cpu_usage_entry :: atom()) => String.t() 59 | } 60 | def format_average_cpu_usage(cpu_snapshots_reversed) do 61 | cpu_snapshots = Enum.reverse(cpu_snapshots_reversed) 62 | 63 | %{ 64 | average_cpu_usage: 65 | Enum.map_join(cpu_snapshots, ", ", fn %{cpu_usage: _cpu_usage, average_all_cores: avg} -> 66 | format_float(avg) 67 | end), 68 | time: Enum.map_join(cpu_snapshots, ", ", & &1.timestamp) 69 | } 70 | end 71 | 72 | @spec format_cpu_usage_by_core([CpuInfo.cpu_snapshot_t()]) :: %{ 73 | result: [String.t()], 74 | time: String.t(), 75 | cores_number: number() 76 | } 
77 | def format_cpu_usage_by_core(cpu_snapshots_reversed) do 78 | result_by_core_timestamp = 79 | Enum.reduce(cpu_snapshots_reversed, %{}, fn %{cpu_usage: cpu_usage, average_all_cores: _avg}, 80 | cpu_usage_acc -> 81 | reduce_cpu_usage(cpu_usage, cpu_usage_acc) 82 | end) 83 | 84 | reversed_result = 85 | Enum.reduce(result_by_core_timestamp, [], fn {_core_id, usage_timestamps}, result -> 86 | [ 87 | Enum.map_join(usage_timestamps, ", ", fn value -> 88 | format_float(value) 89 | end) 90 | | result 91 | ] 92 | end) 93 | 94 | %{ 95 | result: Enum.reverse(reversed_result), 96 | time: Enum.map_join(cpu_snapshots_reversed, ", ", & &1.timestamp), 97 | cores_number: length(reversed_result) 98 | } 99 | end 100 | 101 | @spec format_memory_usage([MemoryInfo.memory_snapshot_t()]) :: %{ 102 | (memory_usage_entry :: atom()) => String.t() 103 | } 104 | def format_memory_usage(memory_snapshots_reversed) do 105 | memory_snapshots = Enum.reverse(memory_snapshots_reversed) 106 | 107 | %{ 108 | memory_usage: 109 | Enum.map_join(memory_snapshots, ", ", fn %{total: total_bytes} -> total_bytes end), 110 | time: Enum.map_join(memory_snapshots, ", ", & &1.timestamp) 111 | } 112 | end 113 | 114 | defp reduce_cpu_usage(cpu_usage, cpu_usage_acc) do 115 | Enum.reduce(cpu_usage, cpu_usage_acc, fn {core_id, cpu_usage}, cpu_usage_acc -> 116 | Map.update( 117 | cpu_usage_acc, 118 | core_id, 119 | [cpu_usage], 120 | &[cpu_usage | &1] 121 | ) 122 | end) 123 | end 124 | 125 | @spec was_busy?(SchedulerInfo.sched_usage_t()) :: boolean() 126 | def was_busy?(scheduler_usage_info) do 127 | Enum.any?(scheduler_usage_info, fn {_scheduler_id, {usage, _percent_usage}} -> usage > 0 end) 128 | end 129 | 130 | @spec as_downcase_atom(String.t()) :: atom() 131 | def as_downcase_atom(metric), do: metric |> String.downcase() |> String.to_existing_atom() 132 | end 133 | -------------------------------------------------------------------------------- /lib/beamchmark/formatters/utils.ex: 
-------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.Utils do 2 | @moduledoc """ 3 | The module provides functions common for multiple formatters. 4 | """ 5 | 6 | @doc """ 7 | Takes memory in bytes and returns it as human-readable string. 8 | """ 9 | @spec format_memory(integer() | :unknown, non_neg_integer()) :: String.t() 10 | def format_memory(mem, decimal_places \\ 0) 11 | 12 | def format_memory(mem, decimal_places) when is_integer(mem) and mem > 0 do 13 | log_mem = Math.log(mem, 1024) 14 | 15 | div_and_round = fn num, power -> 16 | (num / Math.pow(1024, power)) 17 | |> Float.round(decimal_places) 18 | |> (&if(round(&1) == &1, do: round(&1), else: &1)).() 19 | |> to_string() 20 | end 21 | 22 | cond do 23 | log_mem >= 4 -> div_and_round.(mem, 4) <> " TB" 24 | log_mem >= 3 -> div_and_round.(mem, 3) <> " GB" 25 | log_mem >= 2 -> div_and_round.(mem, 2) <> " MB" 26 | log_mem >= 1 -> div_and_round.(mem, 1) <> " KB" 27 | true -> "#{mem} B" 28 | end 29 | end 30 |
 # Negative values (e.g. memory-usage diffs) are rendered as the formatted
 # absolute value with a leading minus sign.
 # FIX: propagate the caller's decimal_places to the recursive call — the
 # original `format_memory(-mem)` silently reset precision to the default 0.
 31 | def format_memory(mem, dp) when is_integer(mem) and mem < 0 do 32 | "-" <> format_memory(-mem, dp) 33 | end 34 | 35 | def format_memory(0, _dp) do 36 | "-" 37 | end 38 | 39 | def format_memory(:unknown, _dp) do 40 | "-" 41 | end 42 | end 43 | -------------------------------------------------------------------------------- /lib/beamchmark/math.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Math do 2 | @moduledoc """ 3 | The module contains helper math types and utility functions. 4 | """ 5 | 6 | @typedoc """ 7 | Represents a percent. 8 | """ 9 | @type percent_t() :: float() 10 | 11 | @typedoc """ 12 | Represents a percent difference. 13 | 14 | This can be either `t:percent_t/0` or `:nan` when trying to compare value with 0. 
15 | """ 16 | @type percent_diff_t() :: percent_t() | :nan 17 | 18 | @spec percent_diff(number(), number()) :: percent_diff_t()
 19 | def percent_diff(base, new) do
 20 | cond do
 21 | base == new ->
 22 | # equal values (including both being 0) mean no difference; return a
 # float so the result conforms to percent_t() :: float()
 23 | 0.0
 24 |
 25 | base == 0 ->
 26 | # cannot compute a relative difference when the base is 0
 27 | :nan
 28 |
 29 | new == 0 ->
 30 | # the new value is 100% lower than the base one
 31 | -100.0
 32 |
 33 | true ->
 34 | new / base * 100 - 100
 35 | end 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /lib/beamchmark/scenario.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Scenario do 2 | @moduledoc """ 3 | Scenario to run during benchmarking. Defines a behaviour that needs to be adopted by benchmarked modules. 4 | 5 | `Beamchmark` will call the implementation of `run/0` in a new process, shutting it down once it completes all 6 | measurements. The implementation should run for a longer period of time (possibly infinite) than measurements, 7 | so that the EVM isn't benchmarked while it's idle. For the same reason, it is recommended to `raise` immediately 8 | in case the implementation fails. 9 | """ 10 | 11 | @typedoc """ 12 | Represents a module implementing `#{inspect(__MODULE__)}` behaviour. 13 | """ 14 | @type t :: module() 15 | 16 | @doc """ 17 | The function that will be called during benchmarking. 18 | """ 19 | @callback run() :: any() 20 | end 21 | -------------------------------------------------------------------------------- /lib/beamchmark/suite.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite do 2 | @moduledoc """ 3 | The module defines a struct representing a single run of benchmark. It is also responsible for running the 4 | benchmark and saving/loading the results. 
5 | 6 | The results are serialized and stored in `output_dir / scenario name / delay_duration` directory, where 7 | `scenario name` is the name of module implementing scenario (without separating dots) and `output_dir`, 8 | `delay`, `duration` are fetched from the suite's configuration. 9 | """ 10 | 11 | alias Beamchmark.Scenario 12 | alias __MODULE__.{Configuration, SystemInfo, Measurements} 13 | 14 | @type t :: %__MODULE__{ 15 | scenario: Scenario.t() | nil, 16 | configuration: Configuration.t(), 17 | system_info: SystemInfo.t(), 18 | measurements: Measurements.t() | nil 19 | } 20 | 21 | @enforce_keys [ 22 | :scenario, 23 | :configuration, 24 | :system_info, 25 | :measurements 26 | ] 27 | defstruct @enforce_keys 28 | 29 | @suite_filename "suite" 30 | @old_suite_filename "suite_old" 31 | 32 | @spec init(Configuration.t()) :: t() 33 | def init(%Configuration{} = configuration) do 34 | %__MODULE__{ 35 | scenario: nil, 36 | configuration: configuration, 37 | system_info: SystemInfo.init(), 38 | measurements: nil 39 | } 40 | end 41 | 42 | @spec init(Scenario.t(), Configuration.t()) :: t() 43 | def init(scenario, %Configuration{} = configuration) do 44 | implements_scenario? = 45 | scenario.module_info(:attributes) 46 | |> Keyword.get(:behaviour, []) 47 | |> Enum.member?(Scenario) 48 | 49 | unless implements_scenario? do 50 | raise "#{inspect(scenario)} is not a module implementing #{inspect(Scenario)} behaviour." 
51 | end 52 | 53 | %__MODULE__{ 54 | scenario: scenario, 55 | configuration: configuration, 56 | system_info: SystemInfo.init(), 57 | measurements: nil 58 | } 59 | end 60 | 61 | @spec run(t()) :: t() 62 | def run(%__MODULE__{scenario: scenario, configuration: config} = suite) do 63 | Mix.shell().info("Running scenario \"#{inspect(scenario)}\"...") 64 | task = Task.async(fn -> suite.scenario.run() end) 65 | 66 | Mix.shell().info("Waiting #{inspect(config.delay)} seconds...") 67 | Process.sleep(:timer.seconds(config.delay)) 68 | 69 | Mix.shell().info("Benchmarking for #{inspect(config.duration)} seconds...") 70 | 71 | measurements = 72 | Measurements.gather(config.duration, config.cpu_interval, config.memory_interval) 73 | 74 | if Process.alive?(task.pid) || config.attached? do 75 | Mix.shell().info("Benchmarking finished. Stopping scenario.") 76 | 77 | case Task.shutdown(task, :brutal_kill) do 78 | {:exit, reason} -> 79 | raise "The scenario process unexpectedly died due to #{inspect(reason)}." 80 | 81 | _other -> 82 | :ok 83 | end 84 | else 85 | Mix.shell().error(""" 86 | The scenario had been completed before the measurements ended. 87 | Consider decreasing duration/delay or making the scenario run longer to get more accurate results. 
88 | """) 89 | end 90 | 91 | %__MODULE__{suite | measurements: measurements} 92 | end 93 | 94 | @spec save(t()) :: :ok 95 | def save(%__MODULE__{configuration: config} = suite) do 96 | output_dir = output_dir_for(suite) 97 | File.mkdir_p!(output_dir) 98 | 99 | new_path = Path.join([output_dir, @suite_filename]) 100 | old_path = Path.join([output_dir, @old_suite_filename]) 101 |
 # Keep the previous run (if any) as the comparison base before overwriting.
 102 | if File.exists?(new_path) do 103 | File.rename!(new_path, old_path) 104 | end 105 | 106 | File.write!(new_path, :erlang.term_to_binary(suite)) 107 |
 # FIX: output_dir is already a plain path string — the original message used
 # `inspect/1` (which adds its own quotes) plus a stray backtick, producing
 # e.g. `saved to ""/path"`" directory.` in the shell output.
 108 | Mix.shell().info("The results were saved to \"#{config.output_dir}\" directory.") 109 | end 110 | 111 | @spec try_load_base(t()) :: {:ok, t()} | {:error, File.posix()} 112 | def try_load_base(%__MODULE__{} = suite) do 113 | output_dir = output_dir_for(suite) 114 | 115 | with old_path <- Path.join([output_dir, @old_suite_filename]), 116 | {:ok, suite} <- File.read(old_path), 117 | suite <- :erlang.binary_to_term(suite) do 118 | {:ok, suite} 119 | end 120 | end 121 | 122 | defp output_dir_for(%__MODULE__{configuration: config} = suite) do 123 | scenario_dir = suite.scenario |> Atom.to_string() |> String.replace(".", "") 124 | config_dir = "#{config.delay}_#{config.duration}" 125 | 126 | Path.join([config.output_dir, scenario_dir, config_dir]) 127 | end 128 | end 129 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/configuration.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.Configuration do 2 | @moduledoc """ 3 | The module defines a structure used to configure `#{inspect(Beamchmark.Suite)}`. For more information 4 | about customizing #{inspect(Beamchmark)}, refer to `t:Beamchmark.options_t/0`. 
5 | """ 6 | 7 | alias Beamchmark.Formatter 8 | 9 | @type t :: %__MODULE__{ 10 | name: String.t() | nil, 11 | duration: pos_integer(), 12 | cpu_interval: pos_integer(), 13 | memory_interval: pos_integer(), 14 | delay: non_neg_integer(), 15 | formatters: [Formatter.t()], 16 | output_dir: Path.t(), 17 | compare?: boolean(), 18 | attached?: boolean(), 19 | metadata: map() 20 | } 21 | 22 | @enforce_keys [ 23 | :duration, 24 | :cpu_interval, 25 | :memory_interval, 26 | :delay, 27 | :formatters, 28 | :compare?, 29 | :output_dir, 30 | :attached?, 31 | :metadata 32 | ] 33 | 34 | defstruct @enforce_keys ++ [:name] 35 | 36 | @spec get_configuration(Keyword.t(), __MODULE__.t()) :: __MODULE__.t() 37 | def get_configuration(opts, default_config) do 38 | %Beamchmark.Suite.Configuration{ 39 | name: Keyword.get(opts, :name), 40 | duration: Keyword.get(opts, :duration, default_config.duration), 41 | cpu_interval: Keyword.get(opts, :cpu_interval, default_config.cpu_interval), 42 | memory_interval: Keyword.get(opts, :memory_interval, default_config.memory_interval), 43 | delay: Keyword.get(opts, :delay, default_config.delay), 44 | formatters: Keyword.get(opts, :formatters, default_config.formatters), 45 | compare?: Keyword.get(opts, :compare?, default_config.compare?), 46 | output_dir: Keyword.get(opts, :output_dir, default_config.output_dir) |> Path.expand(), 47 | attached?: Keyword.get(opts, :attached?, default_config.attached?), 48 | metadata: Keyword.get(opts, :metadata, default_config.metadata) 49 | } 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/cpu/cpu_task.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.CPU.CpuTask do 2 | @moduledoc """ 3 | This module contains the CPU benchmarking task. 
4 | Measurements are performed using [`:cpu_sup.util/1`](https://www.erlang.org/doc/man/cpu_sup.html) 5 | Currently (according to docs), as busy processor states we identify: 6 | - user 7 | - nice_user (low priority use mode) 8 | - kernel 9 | """ 10 | use Task 11 | 12 | alias Beamchmark.Suite.Measurements.CpuInfo 13 | alias Beamchmark.Utils 14 | 15 | @interfere_timeout 100 16 | 17 | @doc """ 18 | 19 | """ 20 | @spec start_link(cpu_interval :: pos_integer(), duration :: pos_integer()) :: Task.t() 21 | def start_link(cpu_interval, duration) do 22 | Task.async(fn -> 23 | run_poll(cpu_interval, duration) 24 | end) 25 | end 26 | 27 | @spec run_poll(number(), number()) :: {:ok, CpuInfo.t()} 28 | defp run_poll(cpu_interval, duration) do 29 | do_run_poll(Utils.get_os_name(), cpu_interval, duration) 30 | end 31 | 32 | @spec do_run_poll(atom(), number(), number()) :: {:ok, CpuInfo.t()} 33 | defp do_run_poll(:Windows, cpu_interval, duration) do 34 | iterations_number = trunc(duration / cpu_interval) 35 | pid = self() 36 | 37 | spawn_snapshot = fn iteration -> 38 | spawn(fn -> cpu_snapshot_windows(pid, iteration * cpu_interval / 1000) end) 39 | Process.sleep(cpu_interval) 40 | end 41 | 42 | Task.async(fn -> 43 | Enum.each(1..iterations_number, spawn_snapshot) 44 | end) 45 | 46 | cpu_snapshots = receive_snapshots(iterations_number) 47 | {:ok, CpuInfo.from_cpu_snapshots(cpu_snapshots)} 48 | end 49 | 50 | defp do_run_poll(_os, cpu_interval, duration) do 51 | iterations_number = trunc(duration / cpu_interval) 52 | :cpu_sup.start() 53 | # First run returns garbage acc to docs 54 | :cpu_sup.util([:per_cpu]) 55 | # And the fact of measurement is polluting the results, 56 | # So we need to wait for @interfere_timeout 57 | Process.sleep(@interfere_timeout) 58 | 59 | if cpu_interval < @interfere_timeout do 60 | raise "cpu_interval (#{cpu_interval}) can't be less than #{@interfere_timeout}" 61 | end 62 | 63 | cpu_snapshots = 64 | Enum.map(1..iterations_number, fn iteration -> 65 | 
Process.sleep(cpu_interval) 66 | cpu_snapshot() |> Map.put(:timestamp, iteration * cpu_interval / 1000) 67 | end) 68 | 69 | {:ok, CpuInfo.from_cpu_snapshots(cpu_snapshots)} 70 | end 71 | 72 | defp receive_snapshots(snapshots_no, cpu_snapshots \\ []) do 73 | case snapshots_no do 74 | 0 -> 75 | cpu_snapshots 76 | 77 | _snapshots_no -> 78 | cpu_snapshots = 79 | receive do 80 | {:cpu_snapshot, snapshot} -> 81 | [snapshot | cpu_snapshots] 82 | end 83 | 84 | receive_snapshots(snapshots_no - 1, cpu_snapshots) 85 | end 86 | end 87 | 88 | @spec cpu_snapshot_windows(pid(), number()) :: nil 89 | defp cpu_snapshot_windows(pid, timestamp) do 90 | {cpu_util_result, 0} = System.cmd("wmic", ["cpu", "get", "loadpercentage"]) 91 | 92 | average_all_cores = 93 | try do 94 | cpu_util_result 95 | |> String.split("\r\r\n") 96 | |> Enum.at(1) 97 | |> String.trim() 98 | |> Float.parse() 99 | |> elem(0) 100 | rescue 101 | ArgumentError -> 0.0 102 | end 103 | 104 | send( 105 | pid, 106 | {:cpu_snapshot, 107 | %{ 108 | timestamp: timestamp, 109 | cpu_usage: %{}, 110 | average_all_cores: average_all_cores 111 | }} 112 | ) 113 | end 114 | 115 | defp cpu_snapshot() do 116 | cpu_util_result = :cpu_sup.util([:per_cpu]) 117 | 118 | cpu_core_usage_map = 119 | Enum.reduce(cpu_util_result, %{}, fn {core_id, usage, _idle, _mix}, cpu_core_usage_acc -> 120 | Map.put(cpu_core_usage_acc, core_id, usage) 121 | end) 122 | 123 | average_all_cores = 124 | Enum.reduce(cpu_core_usage_map, 0, fn {_core_id, usage}, average_all_cores_acc -> 125 | average_all_cores_acc + usage 126 | end) / map_size(cpu_core_usage_map) 127 | 128 | %{ 129 | cpu_usage: cpu_core_usage_map, 130 | average_all_cores: average_all_cores 131 | } 132 | end 133 | end 134 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/measurements.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.Measurements do 2 | @moduledoc """ 3 | The 
module is responsible for gathering BEAM statistics during benchmarking. 4 | """ 5 | 6 | alias __MODULE__.CpuInfo 7 | alias __MODULE__.MemoryInfo 8 | alias __MODULE__.SchedulerInfo 9 | alias Beamchmark.Suite.CPU.CpuTask 10 | alias Beamchmark.Suite.Memory.MemoryTask 11 | 12 | @type reductions_t() :: non_neg_integer() 13 | @type context_switches_t() :: non_neg_integer() 14 | 15 | @type t :: %__MODULE__{ 16 | scheduler_info: SchedulerInfo.t(), 17 | cpu_info: CpuInfo.t(), 18 | memory_info: MemoryInfo.t(), 19 | reductions: reductions_t(), 20 | context_switches: context_switches_t() 21 | } 22 | 23 | @enforce_keys [ 24 | :scheduler_info, 25 | :reductions, 26 | :context_switches 27 | ] 28 | defstruct [ 29 | :scheduler_info, 30 | :reductions, 31 | :context_switches, 32 | :cpu_info, 33 | :memory_info 34 | ] 35 | 36 | @spec gather(pos_integer(), pos_integer(), pos_integer()) :: t() 37 | def gather(duration, cpu_interval, memory_interval) do 38 | sample = :scheduler.sample_all() 39 | 40 | cpu_task = CpuTask.start_link(cpu_interval, duration * 1000) 41 | memory_task = MemoryTask.start_link(memory_interval, duration * 1000) 42 | 43 | Process.sleep(:timer.seconds(duration)) 44 | 45 | scheduler_info = 46 | sample 47 | |> :scheduler.utilization() 48 | |> SchedulerInfo.from_sched_util_result() 49 | 50 | {reductions, _reductions_from_last_call} = :erlang.statistics(:reductions) 51 | 52 | # second element of this tuple is always 0 53 | {context_switches, 0} = :erlang.statistics(:context_switches) 54 | 55 | {:ok, cpu_info} = Task.await(cpu_task, :infinity) 56 | {:ok, memory_info} = Task.await(memory_task, :infinity) 57 | 58 | %__MODULE__{ 59 | scheduler_info: scheduler_info, 60 | reductions: reductions, 61 | context_switches: context_switches, 62 | cpu_info: cpu_info, 63 | memory_info: memory_info 64 | } 65 | end 66 | 67 | @spec diff(t(), t()) :: t() 68 | def diff(base, new) do 69 | scheduler_info_diff = SchedulerInfo.diff(base.scheduler_info, new.scheduler_info) 70 | cpu_info_diff = 
CpuInfo.diff(base.cpu_info, new.cpu_info) 71 | memory_info_diff = MemoryInfo.diff(base.memory_info, new.memory_info) 72 | 73 | %__MODULE__{ 74 | scheduler_info: scheduler_info_diff, 75 | reductions: new.reductions - base.reductions, 76 | context_switches: new.context_switches - base.context_switches, 77 | cpu_info: cpu_info_diff, 78 | memory_info: memory_info_diff 79 | } 80 | end 81 | end 82 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/measurements/cpu_info.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.Measurements.CpuInfo do 2 | @moduledoc """ 3 | Module representing statistics about cpu usage. 4 | 5 | Method of measuring: 6 | - Take a snapshot of cpu usage every `cpu_interval` milliseconds 7 | - Calculate the average cpu usage of processor (combining each core usage) 8 | - At the end combine the results and calculate the average 9 | 10 | **Warning!** 11 | This module can give unstable cpu usage values when measuring a short time because of a high cpu volatility. 
12 | TODO Can be improved by taking average of 5-10 values for each snapshot 13 | """ 14 | 15 | use Bunch.Access 16 | alias Beamchmark.Math 17 | 18 | @typedoc """ 19 | All information gathered via single snapshot + processor average 20 | """ 21 | @type cpu_snapshot_t :: 22 | %{ 23 | timestamp: pos_integer(), 24 | cpu_usage: %{ 25 | (core_id :: integer()) => usage :: Math.percent_t() 26 | }, 27 | average_all_cores: average_all_cores :: Math.percent_t() 28 | } 29 | 30 | @typedoc """ 31 | All information gathered via all snapshots 32 | `all_average` is average from all snapshots 33 | """ 34 | @type t :: %__MODULE__{ 35 | cpu_snapshots: [cpu_snapshot_t()] | nil, 36 | average_by_core: %{ 37 | (core_id :: number()) => usage :: Math.percent_t() | float() 38 | }, 39 | average_all: Math.percent_t() | float() 40 | } 41 | 42 | defstruct cpu_snapshots: [], 43 | average_by_core: %{}, 44 | average_all: 0 45 | 46 | @doc """ 47 | Converts list of `cpu_snapshot_t` to ` #{__MODULE__}.t()` 48 | By calculating the average 49 | """ 50 | @spec from_cpu_snapshots([cpu_snapshot_t()]) :: t() 51 | def from_cpu_snapshots(cpu_snapshots) do 52 | average_all = 53 | Enum.reduce(cpu_snapshots, 0, fn map, average_all_acc -> 54 | average_all_acc + map.average_all_cores 55 | end) / length(cpu_snapshots) 56 | 57 | sum_by_core = 58 | Enum.reduce(cpu_snapshots, %{}, fn %{cpu_usage: cpu_usage}, sum_cores_acc -> 59 | reduce_cpu_usage(cpu_usage, sum_cores_acc) 60 | end) 61 | 62 | number_of_snapshots = length(cpu_snapshots) 63 | 64 | average_by_core = 65 | Enum.reduce(sum_by_core, %{}, fn {core_id, value}, average_by_core_acc -> 66 | Map.put(average_by_core_acc, core_id, value / number_of_snapshots) 67 | end) 68 | 69 | %__MODULE__{ 70 | cpu_snapshots: cpu_snapshots, 71 | average_by_core: average_by_core, 72 | average_all: average_all 73 | } 74 | end 75 | 76 | @spec diff(t(), t()) :: t() 77 | def diff(base, new) do 78 | average_by_core_diff = 79 | Enum.reduce(new.average_by_core, %{}, fn {core_id, 
new_core_avg}, 80 | average_by_core_diff_acc -> 81 | Map.put( 82 | average_by_core_diff_acc, 83 | core_id, 84 | new_core_avg - Map.fetch!(base.average_by_core, core_id) 85 | ) 86 | end) 87 | 88 | %__MODULE__{ 89 | cpu_snapshots: nil, 90 | average_all: new.average_all - base.average_all, 91 | average_by_core: average_by_core_diff 92 | } 93 | end 94 | 95 | defp reduce_cpu_usage(cpu_usage, sum_cores_acc) do 96 | Enum.reduce(cpu_usage, sum_cores_acc, fn {core_id, usage}, sum_cores_acc -> 97 | Map.update(sum_cores_acc, core_id, usage, &(&1 + usage)) 98 | end) 99 | end 100 | end 101 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/measurements/memory_info.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.Measurements.MemoryInfo do 2 | @moduledoc """ 3 | Module representing statistics about memory usage. 4 | """ 5 | 6 | @type bytes_t :: non_neg_integer 7 | 8 | @type memory_snapshot_t :: %{ 9 | timestamp: pos_integer(), 10 | total: bytes_t, 11 | processes: bytes_t, 12 | processes_used: bytes_t, 13 | system: bytes_t, 14 | atom: bytes_t, 15 | atom_used: bytes_t, 16 | binary: bytes_t, 17 | code: bytes_t, 18 | ets: bytes_t 19 | } 20 | 21 | @type t :: %__MODULE__{ 22 | memory_snapshots: [memory_snapshot_t()] | nil, 23 | average: memory_snapshot_t() 24 | } 25 | 26 | @enforce_keys [:memory_snapshots, :average] 27 | 28 | defstruct memory_snapshots: [], 29 | average: %{} 30 | 31 | @spec from_memory_snapshots([memory_snapshot_t()]) :: __MODULE__.t() 32 | def from_memory_snapshots(memory_snapshots) do 33 | mem_types = memory_snapshots |> List.first() |> Map.keys() |> List.delete(:timestamp) 34 | 35 | average = 36 | Enum.reduce(mem_types, %{}, fn mem_type, average -> 37 | mem_type_avg = 38 | memory_snapshots 39 | |> Enum.map(&Map.get(&1, mem_type)) 40 | |> Enum.sum() 41 | |> div(length(memory_snapshots)) 42 | 43 | Map.put(average, mem_type, mem_type_avg) 44 | end) 
45 | 46 | %__MODULE__{memory_snapshots: memory_snapshots, average: average} 47 | end 48 | 49 | @spec diff(__MODULE__.t(), __MODULE__.t()) :: 50 | Beamchmark.Suite.Measurements.MemoryInfo.t() 51 | def diff(base, new) do 52 | average_diff = 53 | Enum.reduce(Map.keys(base.average), %{}, fn mem_type, average -> 54 | Map.put( 55 | average, 56 | mem_type, 57 | Map.get(new.average, mem_type) - Map.get(base.average, mem_type) 58 | ) 59 | end) 60 | 61 | %__MODULE__{ 62 | memory_snapshots: nil, 63 | average: average_diff 64 | } 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/measurements/scheduler_info.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.Measurements.SchedulerInfo do 2 | @moduledoc """ 3 | Module representing different statistics about scheduler usage. 4 | """ 5 | 6 | use Bunch.Access 7 | 8 | alias Beamchmark.Math 9 | 10 | @type sched_usage_t :: %{ 11 | (sched_id :: integer()) => 12 | {util :: float(), percent :: Math.percent_t() | Math.percent_diff_t()} 13 | } 14 | @type total_sched_usage_t :: 15 | {util :: float(), percent :: Math.percent_t() | Math.percent_diff_t()} 16 | @type weighted_sched_usage_t :: 17 | {util :: float(), percent :: Math.percent_t() | Math.percent_diff_t()} 18 | 19 | @type t :: %__MODULE__{ 20 | normal: sched_usage_t(), 21 | cpu: sched_usage_t(), 22 | io: sched_usage_t(), 23 | total_normal: total_sched_usage_t(), 24 | total_cpu: total_sched_usage_t(), 25 | total_io: total_sched_usage_t(), 26 | total: total_sched_usage_t(), 27 | weighted: weighted_sched_usage_t() 28 | } 29 | 30 | defstruct normal: %{}, 31 | cpu: %{}, 32 | io: %{}, 33 | total_normal: {0, 0}, 34 | total_cpu: {0, 0}, 35 | total_io: {0, 0}, 36 | total: {0, 0}, 37 | weighted: {0, 0} 38 | 39 | # converts output of `:scheduler.utilization/1 to `SchedulerInfo.t()` 40 | @spec from_sched_util_result(any()) :: t() 41 | def 
from_sched_util_result(sched_util_result) do 42 | scheduler_info = 43 | sched_util_result 44 | |> Enum.reduce(%__MODULE__{}, fn 45 | {sched_type, sched_id, util, percent}, scheduler_info 46 | when sched_type in [:normal, :cpu, :io] -> 47 | # convert from charlist to string, remove trailing percent sign and convert to float 48 | percent = String.slice("#{percent}", 0..-2//1) |> String.to_float() 49 | put_in(scheduler_info, [sched_type, sched_id], {util, percent}) 50 | 51 | {type, util, percent}, scheduler_info when type in [:total, :weighted] -> 52 | percent = String.slice("#{percent}", 0..-2//1) |> String.to_float() 53 | put_in(scheduler_info[type], {util, percent}) 54 | end) 55 | 56 | total_normal = typed_total(scheduler_info.normal) 57 | total_cpu = typed_total(scheduler_info.cpu) 58 | total_io = typed_total(scheduler_info.io) 59 | 60 | %__MODULE__{ 61 | scheduler_info 62 | | total_normal: total_normal, 63 | total_cpu: total_cpu, 64 | total_io: total_io 65 | } 66 | end 67 | 68 | @spec diff(t(), t()) :: t() 69 | def diff(base, new) do 70 | normal_diff = sched_usage_diff(base.normal, new.normal) 71 | cpu_diff = sched_usage_diff(base.cpu, new.cpu) 72 | io_diff = sched_usage_diff(base.io, new.io) 73 | 74 | total_normal_diff = sched_usage_diff(base.total_normal, new.total_normal) 75 | total_cpu_diff = sched_usage_diff(base.total_cpu, new.total_cpu) 76 | total_io_diff = sched_usage_diff(base.total_io, new.total_io) 77 | total_diff = sched_usage_diff(base.total, new.total) 78 | 79 | weighted_diff = sched_usage_diff(base.weighted, new.weighted) 80 | 81 | %__MODULE__{ 82 | normal: normal_diff, 83 | cpu: cpu_diff, 84 | io: io_diff, 85 | total_normal: total_normal_diff, 86 | total_cpu: total_cpu_diff, 87 | total_io: total_io_diff, 88 | total: total_diff, 89 | weighted: weighted_diff 90 | } 91 | end 92 | 93 | defp typed_total(scheduler_usage) do 94 | count = scheduler_usage |> Map.keys() |> Enum.count() 95 | 96 | if count != 0 do 97 | util_sum = 98 | scheduler_usage 99 | |> 
Map.values() 100 | |> Enum.reduce(0, fn {util, _percent}, util_sum -> 101 | util_sum + util 102 | end) 103 | 104 | {util_sum / count, Float.round(util_sum / count * 100, 1)} 105 | else 106 | {0, 0} 107 | end 108 | end 109 | 110 | defp sched_usage_diff(base, new) when is_map(base) and is_map(new) do 111 | Enum.zip(base, new) 112 | |> Map.new(fn 113 | {{sched_id, {base_util, base_percent}}, {sched_id, {new_util, new_percent}}} -> 114 | {sched_id, {new_util - base_util, Math.percent_diff(base_percent, new_percent)}} 115 | end) 116 | end 117 | 118 | defp sched_usage_diff({base_util, base_percent}, {new_util, new_percent}), 119 | do: {new_util - base_util, Math.percent_diff(base_percent, new_percent)} 120 | end 121 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/memory/memory_task.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.Memory.MemoryTask do 2 | @moduledoc """ 3 | This module contains the memory benchmarking task. 
4 | Measurements are performed using [`:erlang.memory/0`](https://www.erlang.org/doc/man/erlang.html#memory-0) 5 | """ 6 | use Task 7 | 8 | alias Beamchmark.Suite.Measurements.MemoryInfo 9 | 10 | @spec start_link(mem_interval :: pos_integer(), duration :: pos_integer()) :: Task.t() 11 | def start_link(mem_interval, duration) do 12 | Task.async(fn -> 13 | run_poll(mem_interval, duration) 14 | end) 15 | end 16 | 17 | @spec run_poll(number(), number()) :: {:ok, MemoryInfo.t()} 18 | defp run_poll(mem_interval, duration) do 19 | iterations_number = trunc(duration / mem_interval) 20 | 21 | memory_snapshots = 22 | Enum.map(1..iterations_number, fn iteration -> 23 | Process.sleep(mem_interval) 24 | 25 | memory_snapshot() 26 | |> Map.put(:timestamp, iteration * mem_interval / 1000) 27 | end) 28 | 29 | {:ok, MemoryInfo.from_memory_snapshots(memory_snapshots)} 30 | end 31 | 32 | defp memory_snapshot() do 33 | :erlang.memory() |> Enum.into(%{}) 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/scenarios/empty_scenario.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Scenario.EmptyScenario do 2 | @moduledoc false 3 | 4 | @behaviour Beamchmark.Scenario 5 | 6 | @spec run() :: :ok 7 | def run() do 8 | :ok 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /lib/beamchmark/suite/system_info.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Suite.SystemInfo do 2 | @moduledoc """ 3 | The module defines a struct containing various information about system that is used for benchmarking. 
4 | """ 5 | 6 | alias Beamchmark.Utils 7 | 8 | @type t :: %__MODULE__{ 9 | elixir_version: String.t(), 10 | otp_version: String.t(), 11 | nif_version: String.t(), 12 | os: atom(), 13 | mem: pos_integer() | :unknown, 14 | arch: String.t(), 15 | num_cores: pos_integer() 16 | } 17 | 18 | @enforce_keys [:elixir_version, :otp_version, :nif_version, :os, :mem, :arch, :num_cores] 19 | defstruct @enforce_keys 20 |
 # Collects a snapshot of runtime/system facts for the benchmark report.
 21 | @spec init :: t()
 22 | def init() do
 # Determine the OS once: `mem/1` dispatches on the same value that is
 # reported in `:os`, so they must never disagree (and the lookup runs once).
 os = Utils.get_os_name()
 23 | %__MODULE__{
 24 | elixir_version: System.version(),
 25 | otp_version: :erlang.system_info(:otp_release) |> List.to_string(),
 26 | nif_version: :erlang.system_info(:nif_version) |> List.to_string(),
 27 | os: os,
 28 | mem: mem(os),
 29 | arch: :erlang.system_info(:system_architecture) |> List.to_string(),
 30 | num_cores: System.schedulers_online()
 31 | }
 32 | end 33 | 34 | @spec mem(atom()) :: pos_integer() | :unknown 35 | defp mem(:macOS) do 36 | System.cmd("sysctl", ["-n", "hw.memsize"]) 37 | |> elem(0) 38 | |> String.trim() 39 | |> String.to_integer() 40 | end 41 | 42 | defp mem(:Linux) do 43 | System.cmd("awk", ["/MemTotal/ {print $2}", "/proc/meminfo"]) 44 | |> elem(0) 45 | |> String.trim() 46 | |> String.to_integer() 47 | |> Kernel.*(1024) 48 | end 49 | 50 | defp mem(_os) do 51 | :unknown 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /lib/beamchmark/utils.ex: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Utils do 2 | @moduledoc """ 3 | The module defines utility functions for Beamchmark. 
4 | """ 5 | 6 | @spec get_os_name :: :FreeBSD | :Linux | :Windows | :macOS 7 | def get_os_name() do 8 | {_family, name} = :os.type() 9 | 10 | case name do 11 | :darwin -> :macOS 12 | :nt -> :Windows 13 | :freebsd -> :FreeBSD 14 | _other -> :Linux 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.MixProject do 2 | use Mix.Project 3 | 4 | @version "1.4.1" 5 | @github_url "https://github.com/membraneframework/beamchmark" 6 | 7 | def project do 8 | [ 9 | app: :beamchmark, 10 | version: @version, 11 | elixir: "~> 1.13", 12 | elixirc_paths: elixirc_paths(Mix.env()), 13 | start_permanent: Mix.env() == :prod, 14 | deps: deps(), 15 | dialyzer: dialyzer(), 16 | 17 | # hex 18 | description: "Tool for measuring EVM performance", 19 | package: package(), 20 | 21 | # docs 22 | name: "Beamchmark", 23 | source_url: @github_url, 24 | homepage_url: "https://membraneframework.org", 25 | docs: docs() 26 | ] 27 | end 28 | 29 | def application do 30 | [ 31 | extra_applications: [:logger, :runtime_tools, :eex, :os_mon] 32 | ] 33 | end 34 | 35 | defp elixirc_paths(:test), do: ["lib", "test/support"] 36 | defp elixirc_paths(_env), do: ["lib"] 37 | 38 | defp deps do 39 | [ 40 | {:bunch, "~> 1.5"}, 41 | {:math, "~> 0.7.0"}, 42 | {:dialyxir, "~> 1.1", only: :dev, runtime: false}, 43 | {:credo, "~> 1.6", only: :dev, runtime: false}, 44 | {:ex_doc, "~> 0.27", only: :dev, runtime: false} 45 | ] 46 | end 47 | 48 | defp dialyzer() do 49 | opts = [ 50 | flags: [:error_handling], 51 | plt_add_apps: [:mix] 52 | ] 53 | 54 | if System.get_env("CI") == "true" do 55 | # Store PLTs in cacheable directory for CI 56 | File.mkdir_p!(Path.join([__DIR__, "priv", "plts"])) 57 | [plt_local_path: "priv/plts", plt_core_path: "priv/plts"] ++ opts 58 | else 59 | opts 60 | end 61 | end 62 | 63 | defp package do 64 | [ 65 | maintainers: 
["Membrane Team"], 66 | licenses: ["Apache-2.0"], 67 | links: %{ 68 | "GitHub" => @github_url, 69 | "Membrane Framework Homepage" => "https://membraneframework.org" 70 | } 71 | ] 72 | end 73 | 74 | defp docs do 75 | [ 76 | main: "readme", 77 | extras: ["README.md", "LICENSE"], 78 | source_ref: "v#{@version}", 79 | nest_modules_by_prefix: [Beamchmark.Suite, Beamchmark.Formatters] 80 | ] 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "bunch": {:hex, :bunch, "1.6.0", "4775f8cdf5e801c06beed3913b0bd53fceec9d63380cdcccbda6be125a6cfd54", [:mix], [], "hexpm", "ef4e9abf83f0299d599daed3764d19e8eac5d27a5237e5e4d5e2c129cfeb9a22"}, 3 | "bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"}, 4 | "credo": {:hex, :credo, "1.6.7", "323f5734350fd23a456f2688b9430e7d517afb313fbd38671b8a4449798a7854", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "41e110bfb007f7eda7f897c10bf019ceab9a0b269ce79f015d54b0dcf4fc7dd3"}, 5 | "dialyxir": {:hex, :dialyxir, "1.2.0", "58344b3e87c2e7095304c81a9ae65cb68b613e28340690dfe1a5597fd08dec37", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "61072136427a851674cab81762be4dbeae7679f85b1272b6d25c3a839aff8463"}, 6 | "earmark_parser": {:hex, :earmark_parser, "1.4.29", "149d50dcb3a93d9f3d6f3ecf18c918fb5a2d3c001b5d3305c926cddfbd33355b", [:mix], [], "hexpm", "4902af1b3eb139016aed210888748db8070b8125c2342ce3dcae4f38dcc63503"}, 7 | "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", 
"2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, 8 | "ex_doc": {:hex, :ex_doc, "0.29.1", "b1c652fa5f92ee9cf15c75271168027f92039b3877094290a75abcaac82a9f77", [:mix], [{:earmark_parser, "~> 1.4.19", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "b7745fa6374a36daf484e2a2012274950e084815b936b1319aeebcf7809574f6"}, 9 | "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, 10 | "jason": {:hex, :jason, "1.4.0", "e855647bc964a44e2f67df589ccf49105ae039d4179db7f6271dfd3843dc27e6", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "79a3791085b2a0f743ca04cec0f7be26443738779d09302e01318f97bdb82121"}, 11 | "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, 12 | "makeup_elixir": {:hex, :makeup_elixir, "0.16.0", "f8c570a0d33f8039513fbccaf7108c5d750f47d8defd44088371191b76492b0b", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "28b2cbdc13960a46ae9a8858c4bebdec3c9a6d7b4b9e7f4ed1502f8159f338e7"}, 13 | "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, 14 | "math": {:hex, :math, "0.7.0", 
"12af548c3892abf939a2e242216c3e7cbfb65b9b2fe0d872d05c6fb609f8127b", [:mix], [], "hexpm", "7987af97a0c6b58ad9db43eb5252a49fc1dfe1f6d98f17da9282e297f594ebc2"}, 15 | "nimble_parsec": {:hex, :nimble_parsec, "1.2.3", "244836e6e3f1200c7f30cb56733fd808744eca61fd182f731eac4af635cc6d0b", [:mix], [], "hexpm", "c8d789e39b9131acf7b99291e93dae60ab48ef14a7ee9d58c6964f59efb570b0"}, 16 | } 17 | -------------------------------------------------------------------------------- /priv/assets/css/beamchmark.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: "Open Sans", Verdana, Arial, sans-serif; 3 | text-align: center; 4 | min-height: 99vh; 5 | display: flex; 6 | flex-direction: column; 7 | } 8 | 9 | section { 10 | margin-bottom: 20px; 11 | } 12 | 13 | h1 { 14 | font-size: 200%; 15 | } 16 | 17 | h2 { 18 | font-weight: lighter; 19 | } 20 | 21 | a { 22 | color: #E54F6D; 23 | } 24 | 25 | table { 26 | width: 80%; 27 | margin: 0 10%; 28 | border-collapse: collapse; 29 | table-layout: fixed; 30 | font-family: sans-serif; 31 | border-bottom: 5px solid #E54F6D; 32 | } 33 | 34 | thead tr { 35 | background-color: #451F55; 36 | color: #FFFFFF; 37 | } 38 | 39 | th, td { 40 | padding: 10px; 41 | } 42 | 43 | .header, .footer { 44 | padding: 10px; 45 | background-color: #451F55; 46 | border: 3px solid #E54F6D; 47 | color: #FFFFFF; 48 | } 49 | 50 | .results { 51 | width: 80%; 52 | margin: 0 10%; 53 | } 54 | 55 | .footer { 56 | margin-top: auto; 57 | line-height: 1.5; 58 | } -------------------------------------------------------------------------------- /priv/templates/configuration.html.eex: -------------------------------------------------------------------------------- 1 |

Configuration

2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 |
Duration [s]Delay [s]
<%= configuration.duration %><%= configuration.delay %>
16 | 17 |
18 | 19 | 20 | 21 | 22 | <%= for {title, _value} <- configuration.metadata do %> 23 | 24 | <% end %> 25 | 26 | 27 | 28 | 29 | <%= for {_title, value} <- configuration.metadata do %> 30 | 31 | <% end %> 32 | 33 | 34 |
<%= title %>
<%= value %>
35 | -------------------------------------------------------------------------------- /priv/templates/index.html.eex: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Beamchmark Report (<%= format_scenario(new_suite.scenario) %>) 5 | 6 | 7 | <% 8 | assets_dir = Application.app_dir(:beamchmark, "priv/assets") 9 | css_path = Path.join([assets_dir, "css", "beamchmark.css"]) 10 | js_path = Path.join([assets_dir, "js", "plotly-2.9.0.min.js"]) 11 | %> 12 | <%= if inline_assets? do %> 13 | 16 | 19 | <% else %> 20 | 21 | 22 | <% end %> 23 | 24 | 25 |
26 |

Beamchmark Report

27 |

<%= new_suite.configuration.name %>

28 |

<%= format_scenario(new_suite.scenario) %>

29 |
30 |
<%= system(new_suite.system_info) %>
31 |
<%= configuration(new_suite.configuration) %>
32 | <% maybe_base_measurements = if is_nil(base_suite), do: nil, else: base_suite.measurements %> 33 |
<%= measurements(new_suite.measurements, maybe_base_measurements) %>
34 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /priv/templates/measurements.html.eex: -------------------------------------------------------------------------------- 1 |

Results

2 |

Basic metrics

3 |
4 |

Utilization by scheduler type

5 |
6 |

Total scheduler utilization

7 |
8 |

Utilization by scheduler type and ID

9 |
10 |
11 |
12 |

CPU utilization

13 |
14 |
15 |

Memory utilization

16 |
17 | 366 | -------------------------------------------------------------------------------- /priv/templates/system.html.eex: -------------------------------------------------------------------------------- 1 |

System

2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 |
Elixir versionOTP versionNIF versionOSMemoryArchitectureNumber of cores
<%= system_info.elixir_version %><%= system_info.otp_version %><%= system_info.nif_version %><%= system_info.os %><%= Utils.format_memory(system_info.mem) %><%= system_info.arch %><%= system_info.num_cores %>
26 | -------------------------------------------------------------------------------- /test/beamchmark/application_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.ApplicationTest do 2 | use ExUnit.Case, async: true 3 | 4 | @temp_directory TestUtils.temporary_dir(__MODULE__) 5 | 6 | setup do 7 | options = [delay: 0, duration: 1, output_dir: @temp_directory, compare?: true, formatters: []] 8 | 9 | on_exit(fn -> File.rm_rf!(@temp_directory) end) 10 | 11 | [options: options] 12 | end 13 | 14 | test "Beamchmark runs properly", %{options: options} do 15 | assert :ok == Beamchmark.run(MockScenario, options) 16 | # check whether Beamchmark can read and compare new results with the previous one 17 | assert :ok == Beamchmark.run(MockScenario, options) 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /test/beamchmark/formatter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.FormatterTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Beamchmark.{Formatter, Suite} 5 | 6 | @temp_directory TestUtils.temporary_dir(__MODULE__) 7 | 8 | defp put_formatters(suite, formatters) do 9 | Map.update!(suite, :configuration, &Map.put(&1, :formatters, List.wrap(formatters))) 10 | end 11 | 12 | setup_all do 13 | suite = TestUtils.suite_with_measurements(MockScenario, output_dir: @temp_directory) 14 | 15 | [suite: suite] 16 | end 17 | 18 | describe "Formatter.output/1 validates formatters by" do 19 | test "accepting valid formatters", %{suite: suite} do 20 | valid_formatters = [ 21 | [], 22 | ValidFormatter, 23 | List.duplicate(ValidFormatter, 3), 24 | List.duplicate({ValidFormatter, [valid: :options]}, 3), 25 | [ValidFormatter, {ValidFormatter, [these: :are, valid: :options]}] 26 | ] 27 | 28 | Enum.each(valid_formatters, fn formatters -> 29 | assert :ok = suite |> put_formatters(formatters) |> 
Formatter.output() 30 | end) 31 | end 32 | 33 | test "raising on invalid formatters", %{suite: suite} do 34 | invalid_formatters = [ 35 | InvalidFormatter, 36 | [ValidFormatter, InvalidFormatter], 37 | [ValidFormatter, {InvalidFormatter, [valid: :options]}], 38 | [InvalidFormatter, {ValidFormatter, [valid: :options]}] 39 | ] 40 | 41 | Enum.each(invalid_formatters, fn formatters -> 42 | assert_raise RuntimeError, fn -> 43 | suite |> put_formatters(formatters) |> Formatter.output() 44 | end 45 | end) 46 | end 47 | 48 | test "raising on invalid options", %{suite: suite} do 49 | invalid_options = [%{invalid: :options}, nil, [1, 2, 3], [{1, 2}, {"a", "b"}]] 50 | 51 | Enum.each(invalid_options, fn options -> 52 | assert_raise RuntimeError, fn -> 53 | suite |> put_formatters({ValidFormatter, options}) |> Formatter.output() 54 | end 55 | end) 56 | end 57 | end 58 | 59 | describe "When formatters are valid, Formatter.output/1" do 60 | setup %{suite: suite} do 61 | spy_formatter_options = [pid: self(), not: :important, configuration: :info] 62 | 63 | on_exit(fn -> File.rm_rf!(@temp_directory) end) 64 | 65 | [suite: suite, spy_options: spy_formatter_options] 66 | end 67 | 68 | test "passes options to formatters", %{suite: suite, spy_options: spy_options} do 69 | suite = put_formatters(suite, {SpyFormatter, spy_options}) 70 | 71 | assert :ok = Formatter.output(suite) 72 | assert_received {^suite, ^spy_options} 73 | assert_received {:ok, ^spy_options} 74 | end 75 | 76 | test "provides base suite if configured to do so", %{suite: suite, spy_options: spy_options} do 77 | suite = 78 | suite 79 | |> Map.update!(:configuration, &Map.put(&1, :compare?, true)) 80 | |> put_formatters({SpyFormatter, spy_options}) 81 | 82 | # "base" 83 | :ok = Suite.save(suite) 84 | # "new" 85 | :ok = Suite.save(suite) 86 | 87 | assert :ok = Formatter.output(suite) 88 | assert_received {^suite, ^suite, ^spy_options} 89 | assert_received {:ok, ^spy_options} 90 | refute_received _ 91 | end 92 | 93 | test 
"does not provide base suite if there is no such", %{ 94 | suite: suite, 95 | spy_options: spy_options 96 | } do 97 | suite = 98 | suite 99 | |> Map.update!(:configuration, &Map.put(&1, :compare?, true)) 100 | |> put_formatters({SpyFormatter, spy_options}) 101 | 102 | # "new" 103 | :ok = Suite.save(suite) 104 | 105 | assert :ok = Formatter.output(suite) 106 | assert_received {^suite, ^spy_options} 107 | assert_received {:ok, ^spy_options} 108 | refute_received _ 109 | end 110 | 111 | test "does not provide base suite if configured not to do so", %{ 112 | suite: suite, 113 | spy_options: spy_options 114 | } do 115 | suite = put_formatters(suite, {SpyFormatter, spy_options}) 116 | 117 | # "base" 118 | :ok = Suite.save(suite) 119 | # "new" 120 | :ok = Suite.save(suite) 121 | 122 | assert :ok = Formatter.output(suite) 123 | assert_received {^suite, ^spy_options} 124 | assert_received {:ok, ^spy_options} 125 | refute_received _ 126 | end 127 | end 128 | end 129 | -------------------------------------------------------------------------------- /test/beamchmark/formatters/console_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.ConsoleTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Beamchmark.Formatters.Console 5 | 6 | @temp_directory TestUtils.temporary_dir(__MODULE__) 7 | 8 | setup_all do 9 | suite = TestUtils.suite_with_measurements(MockScenario, output_dir: @temp_directory) 10 | 11 | on_exit(fn -> File.rm_rf!(@temp_directory) end) 12 | 13 | [suite: suite] 14 | end 15 | 16 | describe "Console formatter" do 17 | test "format/2 returns a string", %{suite: suite} do 18 | assert is_binary(Console.format(suite, [])) 19 | end 20 | 21 | test "format/3 returns a string", %{suite: suite} do 22 | assert is_binary(Console.format(suite, suite, [])) 23 | end 24 | 25 | test "write/2 returns :ok" do 26 | assert :ok = Console.write("should print on console", []) 27 | end 28 | end 29 | end 30 | 
-------------------------------------------------------------------------------- /test/beamchmark/formatters/html_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.HTMLTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Beamchmark.Formatters.HTML 5 | alias Beamchmark.Formatters.HTML.Templates 6 | alias Beamchmark.Suite.CPU.CpuTask 7 | 8 | @temp_directory TestUtils.temporary_dir(__MODULE__) 9 | @assets_paths TestUtils.html_assets_paths() 10 | 11 | setup_all do 12 | suite = TestUtils.suite_with_measurements(MockScenario, output_dir: @temp_directory) 13 | 14 | on_exit(fn -> File.rm_rf!(@temp_directory) end) 15 | 16 | [suite: suite] 17 | end 18 | 19 | describe "HTML formatter" do 20 | test "format/2 returns a string", %{suite: suite} do 21 | assert is_binary(HTML.format(suite, [])) 22 | end 23 | 24 | test "format/3 returns a string", %{suite: suite} do 25 | assert is_binary(HTML.format(suite, suite, [])) 26 | end 27 | 28 | test "format/2 respects inline_assets? flag", %{suite: suite} do 29 | html_assets_linked = HTML.format(suite, inline_assets?: false) 30 | html_assets_inlined = HTML.format(suite, inline_assets?: true) 31 | 32 | Enum.each(@assets_paths, fn asset_path -> 33 | assert String.contains?(html_assets_linked, asset_path) 34 | assert String.contains?(html_assets_inlined, File.read!(asset_path)) 35 | end) 36 | end 37 | 38 | test "format/3 respects inline_assets? 
flag", %{suite: suite} do 39 | html_assets_linked = HTML.format(suite, suite, inline_assets?: false) 40 | html_assets_inlined = HTML.format(suite, suite, inline_assets?: true) 41 | 42 | Enum.each(@assets_paths, fn asset_path -> 43 | assert String.contains?(html_assets_linked, asset_path) 44 | assert String.contains?(html_assets_inlined, File.read!(asset_path)) 45 | end) 46 | end 47 | 48 | test "format/2 generates reports of predictable size", %{suite: suite} do 49 | html_assets_linked = HTML.format(suite, inline_assets?: false) 50 | html_assets_inlined = HTML.format(suite, inline_assets?: true) 51 | 52 | expected_size_linked = 16_000 53 | expected_size_inlined = 3_670_000 54 | 55 | assert_in_delta byte_size(html_assets_linked), 56 | expected_size_linked, 57 | 0.5 * expected_size_linked 58 | 59 | assert_in_delta byte_size(html_assets_inlined), 60 | expected_size_inlined, 61 | 0.5 * expected_size_inlined 62 | end 63 | 64 | test "format/3 generates reports of predictable size", %{suite: suite} do 65 | html_assets_linked = HTML.format(suite, suite, inline_assets?: false) 66 | html_assets_inlined = HTML.format(suite, suite, inline_assets?: true) 67 | 68 | expected_size_linked = 16_000 69 | expected_size_inlined = 3_670_000 70 | 71 | assert_in_delta byte_size(html_assets_linked), 72 | expected_size_linked, 73 | 0.5 * expected_size_linked 74 | 75 | assert_in_delta byte_size(html_assets_inlined), 76 | expected_size_inlined, 77 | 0.5 * expected_size_inlined 78 | end 79 | 80 | test "write/2 returns :ok and creates an html file" do 81 | mock_html = "some html content here" 82 | options = [output_path: Path.join([@temp_directory, "test.html"]), auto_open?: false] 83 | 84 | assert :ok = HTML.write(mock_html, options) 85 | assert File.exists?(options[:output_path]) 86 | assert File.read!(options[:output_path]) == mock_html 87 | end 88 | 89 | test "format_average_cpu_usage/1 runs and doesn't return nil" do 90 | cpu_task = 91 | CpuTask.start_link( 92 | 100, 93 | 1000 94 | ) 95 | 
96 | assert {:ok, statistics} = Task.await(cpu_task, :infinity) 97 | result = Templates.format_average_cpu_usage(statistics.cpu_snapshots) 98 | assert true == not is_nil(result) 99 | end 100 | 101 | test "format_cpu_usage_by_core/1 runs and doesn't return nil" do 102 | cpu_task = 103 | CpuTask.start_link( 104 | 1000, 105 | 10_000 106 | ) 107 | 108 | assert {:ok, statistics} = Task.await(cpu_task, :infinity) 109 | result = Templates.format_cpu_usage_by_core(statistics.cpu_snapshots) 110 | assert true == not is_nil(result) 111 | end 112 | end 113 | end 114 | -------------------------------------------------------------------------------- /test/beamchmark/formatters/utils_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Beamchmark.Formatters.UtilsTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Beamchmark.Formatters.Utils 5 | 6 | describe "Formatters Utils" do 7 | test "format_memory/2 returns a string" do 8 | assert is_binary(Utils.format_memory(12_345)) 9 | end 10 | 11 | test "format_memory/2 returns unknown when memory is unknown" do 12 | assert Utils.format_memory(:unknown) == "-" 13 | end 14 | 15 | test "format_memory/2 returns human-readable memory size" do 16 | assert Utils.format_memory(1) == "1 B" 17 | assert Utils.format_memory(1023) == "1023 B" 18 | assert Utils.format_memory(1024) == "1 KB" 19 | assert Utils.format_memory(131_071) == "128 KB" 20 | assert Utils.format_memory(30_886_854) == "29 MB" 21 | assert Utils.format_memory(9_079_560_863) == "8 GB" 22 | assert Utils.format_memory(374_151_781_024) == "348 GB" 23 | end 24 | 25 | test "format_memory/2 rounds correctly" do 26 | assert Utils.format_memory(8386, 3) == "8.189 KB" 27 | assert Utils.format_memory(19_650, 3) == "19.189 KB" 28 | assert Utils.format_memory(372_107_998, 1) == "354.9 MB" 29 | assert Utils.format_memory(372_107_998, 3) == "354.87 MB" 30 | assert Utils.format_memory(372_107_998, 5) == "354.86984 MB" 31 | end 32 | end 33 | 
end 34 | -------------------------------------------------------------------------------- /test/cpu_test.exs: -------------------------------------------------------------------------------- 1 | defmodule CpuTaskTest do 2 | use ExUnit.Case 3 | alias Beamchmark.Suite.CPU.CpuTask 4 | 5 | test "CpuTask.start_link/2 runs properly" do 6 | cpu_interval = 100 7 | duration = 15_000 8 | 9 | cpu_task = 10 | CpuTask.start_link( 11 | cpu_interval, 12 | duration 13 | ) 14 | 15 | assert {:ok, cpu_info} = Task.await(cpu_task, :infinity) 16 | assert !is_nil(cpu_info.average_by_core) 17 | assert !is_nil(cpu_info.cpu_snapshots) 18 | assert !is_nil(cpu_info.average_all) 19 | 20 | assert duration / cpu_interval == length(cpu_info.cpu_snapshots) 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /test/memory_test.exs: -------------------------------------------------------------------------------- 1 | defmodule MemoryTaskTest do 2 | use ExUnit.Case 3 | 4 | alias Beamchmark.Suite.Memory.MemoryTask 5 | 6 | test("MemoryTask.start_link/2 runs properly") do 7 | mem_interval = 100 8 | duration = 15_000 9 | 10 | mem_task = 11 | MemoryTask.start_link( 12 | mem_interval, 13 | duration 14 | ) 15 | 16 | assert {:ok, mem_info} = Task.await(mem_task, :infinity) 17 | assert !is_nil(mem_info.average.total) 18 | assert !is_nil(mem_info.memory_snapshots) 19 | 20 | assert duration / mem_interval == length(mem_info.memory_snapshots) 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /test/support/invalid_formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule InvalidFormatter do 2 | @moduledoc false 3 | end 4 | -------------------------------------------------------------------------------- /test/support/mock_scenario.ex: -------------------------------------------------------------------------------- 1 | defmodule MockScenario do 2 | @moduledoc 
false 3 | 4 | @behaviour Beamchmark.Scenario 5 | 6 | @impl true 7 | def run(), do: :noop 8 | end 9 | -------------------------------------------------------------------------------- /test/support/spy_formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule SpyFormatter do 2 | @moduledoc false 3 | 4 | @behaviour Beamchmark.Formatter 5 | 6 | @impl true 7 | def format(suite, options) do 8 | send(options[:pid], {suite, options}) 9 | :ok 10 | end 11 | 12 | @impl true 13 | def format(new_suite, base_suite, options) do 14 | send(options[:pid], {new_suite, base_suite, options}) 15 | :ok 16 | end 17 | 18 | @impl true 19 | def write(data, options) do 20 | send(options[:pid], {data, options}) 21 | :ok 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /test/support/test_utils.ex: -------------------------------------------------------------------------------- 1 | defmodule TestUtils do 2 | @moduledoc false 3 | 4 | alias Beamchmark.{Scenario, Suite} 5 | 6 | @spec temporary_dir(module()) :: Path.t() 7 | def temporary_dir(test_module) do 8 | Path.join([System.tmp_dir!(), test_module |> Atom.to_string() |> String.replace(".", "_")]) 9 | end 10 | 11 | @spec suite_with_measurements(Scenario.t(), Beamchmark.options_t()) :: Suite.t() 12 | def suite_with_measurements(scenario, opts \\ []) do 13 | config = %Suite.Configuration{ 14 | duration: Keyword.get(opts, :duration, 1), 15 | delay: Keyword.get(opts, :delay, 0), 16 | cpu_interval: Keyword.get(opts, :cpu_interval, 1000), 17 | memory_interval: Keyword.get(opts, :memory_interval, 1000), 18 | formatters: Keyword.get(opts, :formatters, []), 19 | compare?: Keyword.get(opts, :compare?, false), 20 | output_dir: Keyword.get(opts, :output_dir, temporary_dir(__MODULE__)), 21 | attached?: Keyword.get(opts, :attached?, false), 22 | metadata: Keyword.get(opts, :metadata, %{}) 23 | } 24 | 25 | scenario |> Suite.init(config) |> Suite.run() 26 | 
end 27 | 28 | @spec html_assets_paths() :: [Path.t()] 29 | def html_assets_paths() do 30 | assets_dir = Path.join([Application.app_dir(:beamchmark), "priv", "assets"]) 31 | 32 | ["css", "js"] 33 | |> Enum.flat_map(fn asset_type -> [assets_dir, asset_type, "*.#{asset_type}"] end) 34 | |> Path.join() 35 | |> Path.wildcard() 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /test/support/valid_formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule ValidFormatter do 2 | @moduledoc false 3 | 4 | @behaviour Beamchmark.Formatter 5 | 6 | @impl true 7 | def format(_new_suite, _base_suite \\ nil, _options), do: :ok 8 | 9 | @impl true 10 | def write(_data, _options), do: :ok 11 | end 12 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | Mix.shell(Mix.Shell.Quiet) 2 | 3 | ExUnit.start() 4 | --------------------------------------------------------------------------------