├── .clang-format ├── .github └── workflows │ ├── cd.yml │ └── ci.yml ├── .gitignore ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── CMakeLists.txt ├── Cargo.toml ├── LICENSE ├── README.md ├── doc ├── beast_on_examples.gif ├── commit_range_runtime.png ├── example_repocheck_settings.yaml └── example_time_series.png ├── example_benchmark ├── CMakeLists.txt └── src │ ├── simple.cpp │ └── with_arg_list.cpp └── src ├── config.rs ├── database.rs ├── exec.rs ├── find.rs ├── logger.rs ├── main.rs ├── parse.rs ├── plot.rs └── repocheck.rs /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | BasedOnStyle: Google 3 | --- 4 | Language: Cpp 5 | IndentWidth: 4 6 | ColumnLimit: 120 7 | --- 8 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: cd-chain 2 | 3 | on: 4 | push: 5 | # Sequence of patterns matched against refs/tags 6 | tags: 7 | - 'v*.*.*' 8 | 9 | env: 10 | deploy_target_name: 'x86_64-unknown-linux-gnu' 11 | 12 | jobs: 13 | quickcheck: 14 | runs-on: ubuntu-latest 15 | outputs: 16 | version: ${{ steps.rustversion.outputs.rustversion }} 17 | steps: 18 | - uses: actions/checkout@v2 19 | - run: cargo check 20 | - run: cargo pkgid 21 | - run: 'echo "$(cargo pkgid | cut -d# -f2)"' 22 | - id: rustversion 23 | run: 'echo "::set-output name=rustversion::$(cargo pkgid | cut -d# -f2)"' 24 | - id: puretag 25 | run: 'echo "::set-output name=tag::${GITHUB_REF#refs/*/}"' 26 | 27 | update_release_draft: 28 | needs: quickcheck 29 | runs-on: ubuntu-latest 30 | steps: 31 | - uses: actions/checkout@v2 32 | - name: Install cargo-deb 33 | run: cargo install cargo-deb 34 | - name: Build deb package 35 | run: cargo deb --target=${{ env.deploy_target_name }} 36 | - name: Check repo directory 37 | run: ls -lha 38 | - name: Create release 39 | id: create_release 40 | uses: softprops/action-gh-release@v1 41 | env: 42 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 43 | with: 44 | tag_name: ${{ needs.quickcheck.outputs.puretag }} 45 | draft: true 46 | prerelease: false 47 | files: ./target/${{ env.deploy_target_name }}/debian/beast_${{needs.quickcheck.outputs.version}}_amd64.deb 48 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci-chain 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | quickcheck: 14 | runs-on: ubuntu-latest 15 | outputs: 16 | version: ${{ steps.rustversion.outputs.rustversion }} 17 | steps: 18 | - uses: actions/checkout@v2 19 | - run: cargo check 20 | - run: cargo pkgid 21 | - run: 'echo "$(cargo pkgid | cut -d# -f2)"' 22 | - id: rustversion 23 | run: 'echo "::set-output name=rustversion::$(cargo pkgid | cut -d# -f2)"' 24 | 25 | build: 26 | name: Build ${{ matrix.os }} ${{ matrix.target }} 27 | needs: quickcheck 28 | runs-on: ${{ matrix.os }} 29 | strategy: 30 | matrix: 31 | os: [ubuntu-latest] 32 | target: [x86_64-unknown-linux-gnu] 33 | steps: 34 | - name: Checkout source 35 | uses: actions/checkout@v2 36 | 37 | - name: GCC and Rust version overview 38 | shell: bash 39 | run: | 40 | gcc --version 41 | rustup -V 42 | cargo -V 43 | 44 | - name: Build 45 | run: cargo build --target=${{ matrix.target }} 46 | 47 | - name: Run tests 48 | run: 
cargo test --target=${{ matrix.target }} 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | *.txt.user 13 | /build/ 14 | /target/ 15 | /.idea/ 16 | /.clangd/ 17 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | 5 | { 6 | "name": "g++ build and debug active file", 7 | "type": "cppdbg", 8 | "request": "launch", 9 | "program": "${fileDirname}/${fileBasenameNoExtension}", 10 | "args": [], 11 | "stopAtEntry": false, 12 | "cwd": "${workspaceFolder}", 13 | "environment": [], 14 | "externalConsole": false, 15 | "MIMode": "gdb", 16 | "setupCommands": [ 17 | { 18 | "description": "Enable pretty-printing for gdb", 19 | "text": "-enable-pretty-printing", 20 | "ignoreFailures": true 21 | } 22 | ], 23 | "preLaunchTask": "g++ build active file", 24 | "miDebuggerPath": "/usr/bin/gdb" 25 | }, 26 | { 27 | "name": "Debug benchmark", 28 | "type": "cppdbg", 29 | "request": "launch", 30 | "program": "${workspaceFolder}/build/benchmarks/findprime_benchmark/findprime-benchmark", 31 | "args": [], 32 | "stopAtEntry": false, 33 | "cwd": "${workspaceFolder}", 34 | "environment": [], 35 | "externalConsole": false, 36 | "MIMode": "gdb", 37 | "setupCommands": [ 38 | { 39 | "description": "Enable pretty-printing for gdb", 40 | "text": "-enable-pretty-printing", 41 | "ignoreFailures": true 42 | } 43 | ], 44 | "miDebuggerPath": "/usr/bin/gdb" 45 | }, 46 | { 47 | "name": "(gdb) Launch rust executable", 48 | "type": "cppdbg", 49 | "request": "launch", 50 | "program": "${workspaceFolder}/target/debug/beast", 51 | "args": ["-d", "${workspaceFolder}"], 52 | "stopAtEntry": false, 53 | "cwd": "${workspaceFolder}", 54 | "environment": [], 55 | "externalConsole": false, 56 | "MIMode": "gdb", 57 | "setupCommands": [ 58 | { 59 | "description": "Enable pretty-printing for gdb", 60 | "text": "-enable-pretty-printing", 61 | "ignoreFailures": true 62 | } 63 | ] 64 | }, 65 | ] 66 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "C_Cpp.default.configurationProvider": "vector-of-bool.cmake-tools", 3 | "C_Cpp.clang_format_path": "/usr/bin/clang-format", 4 | "cmake.configureOnOpen": true, 5 | "cmake.configureSettings": { 6 | "CMAKE_EXPORT_COMPILE_COMMANDS": true, 7 | }, 8 | 9 | "files.associations": { 10 | "type_traits": "cpp", 11 | "cctype": "cpp", 12 | "clocale": "cpp", 13 | "cmath": "cpp", 14 | "cstdarg": "cpp", 15 | "cstddef": "cpp", 16 | "cstdio": "cpp", 17 | "cstdlib": "cpp", 18 | "cstring": "cpp", 19 | "ctime": "cpp", 20 | "cwchar": "cpp", 21 | "cwctype": "cpp", 22 | "array": "cpp", 23 | "atomic": "cpp", 24 | "strstream": "cpp", 25 | "bit": "cpp", 26 | "*.tcc": "cpp", 27 | "bitset": "cpp", 28 | "chrono": "cpp", 29 | "complex": "cpp", 30 | "condition_variable": "cpp", 31 | "cstdint": "cpp", 32 | "deque": 
"cpp", 33 | "list": "cpp", 34 | "map": "cpp", 35 | "set": "cpp", 36 | "unordered_map": "cpp", 37 | "vector": "cpp", 38 | "exception": "cpp", 39 | "algorithm": "cpp", 40 | "functional": "cpp", 41 | "iterator": "cpp", 42 | "memory": "cpp", 43 | "memory_resource": "cpp", 44 | "numeric": "cpp", 45 | "optional": "cpp", 46 | "random": "cpp", 47 | "ratio": "cpp", 48 | "string": "cpp", 49 | "string_view": "cpp", 50 | "system_error": "cpp", 51 | "tuple": "cpp", 52 | "utility": "cpp", 53 | "fstream": "cpp", 54 | "initializer_list": "cpp", 55 | "iomanip": "cpp", 56 | "iosfwd": "cpp", 57 | "iostream": "cpp", 58 | "istream": "cpp", 59 | "limits": "cpp", 60 | "mutex": "cpp", 61 | "new": "cpp", 62 | "ostream": "cpp", 63 | "sstream": "cpp", 64 | "stdexcept": "cpp", 65 | "streambuf": "cpp", 66 | "thread": "cpp", 67 | "cfenv": "cpp", 68 | "cinttypes": "cpp", 69 | "typeindex": "cpp", 70 | "typeinfo": "cpp", 71 | "variant": "cpp" 72 | }, 73 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "tasks": [ 3 | { 4 | "type": "cargo", 5 | "subcommand": "build", 6 | "problemMatcher": [ 7 | "$rustc" 8 | ], 9 | "group": { 10 | "kind": "build", 11 | "isDefault": true 12 | } 13 | }, 14 | { 15 | "type": "cargo", 16 | "subcommand": "test", 17 | "problemMatcher": [ 18 | "$rustc" 19 | ], 20 | "group": { 21 | "kind": "test", 22 | "isDefault": true 23 | } 24 | }, 25 | { 26 | "type": "shell", 27 | "label": "g++ build active file", 28 | "command": "/usr/bin/g++", 29 | "args": [ 30 | "-g", 31 | "${file}", 32 | "-o", 33 | "${fileDirname}/${fileBasenameNoExtension}" 34 | ], 35 | "options": { 36 | "cwd": "/usr/bin" 37 | } 38 | }, 39 | ], 40 | "version": "2.0.0" 41 | } -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.15.3) 2 | 3 | project(benchmarks) 4 | 5 | add_subdirectory(example_benchmark) 6 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "beast" 3 | description = "(be)nchmark (a)nalysis and (s)ummary (t)ool" 4 | version = "2.1.1" 5 | authors = ["Björn Barschtipan"] 6 | edition = "2018" 7 | license = "MIT" 8 | readme = "README.md" 9 | 10 | [dependencies] 11 | is_executable = "0.1.2" 12 | walkdir = "2.3.1" 13 | clap = "2.33.3" 14 | indicatif = "0.15.0" 15 | plotly = "0.6.0" 16 | serde = "1.0.106" 17 | serde_json = "1.0" 18 | preferences = "1.1.0" 19 | regex = "1.4.1" 20 | colored = "2" 21 | yaml-rust = "0.4.4" 22 | serde_yaml= "0.8.14" 23 | git2 = "0.13.12" 24 | execute = "0.2.8" 25 | 26 | [dependencies.mongodb] 27 | version = "1.1.1" 28 | default-features = false 29 | features = ["sync"] 30 | 31 | [package.metadata.deb] 32 | maintainer = "Björn Barschtipan" 33 | copyright = "2021, Björn Barschtipan" 34 | license-file = ["LICENSE", "4"] 35 | extended-description = """\ 36 | A simple command line tool to execute google benchmarks \ 37 | and visualize the results in an nice plot.""" 38 | depends = "$auto" 39 | section = "utility" 40 | priority = "optional" 41 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 
BjoB
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # **beast** - **be**nchmark **a**nalysis and **s**ummary **t**ool
2 | 
3 | ![ci](https://github.com/bjob/beast/workflows/ci-chain/badge.svg) ![cd](https://github.com/bjob/beast/workflows/cd-chain/badge.svg) ![Latest Release](https://img.shields.io/github/release/bjob/beast.svg?style=flat)
4 | 
5 | ``beast`` is a simple tool to execute and post-process a set of Google benchmarks. Its basic feature is plotting multiple benchmark results in a single plot (powered by `plotly`). It is also possible to configure access to a ``mongoDB`` database and push results to it.
6 | 
7 | - [Installation](#installation)
8 | - [Basic Usage Example](#basic-usage-example)
9 | - [Database Setup](#database-setup)
10 | - [Repository Benchmarking](#repository-benchmarking)
11 | 
12 | ----------------------------------
13 | 
14 | ## **Installation**
15 | 
16 | Download the latest release from:
17 | 
18 | [![Download beast](https://img.shields.io/badge/dynamic/json.svg?label=download&url=https://api.github.com/repos/bjob/beast/releases/latest&query=$.assets[0].name&style=for-the-badge)](https://github.com/bjob/beast/releases/latest)
19 | 
20 | and install the ``.deb`` package via ``sudo dpkg -i``.
21 | 
22 | ## **Basic Usage Example**
23 | 
24 | By default, ``beast`` searches your current working directory for executables matching a predefined regular expression pattern. Check ``beast --help`` for an overview of the available options. We will use the C++ example from this repository to show the basic functionality (of course you will need to clone the repo for that):
25 | 
26 | ``cd`` to the ``example_benchmark`` directory and call:
27 | 
28 | ```bash
29 | mkdir build
30 | cd build
31 | cmake ..
32 | cmake --build . --target all
33 | ```
34 | 
35 | This builds a few small benchmark executables which can then be plotted with ``beast``:
36 | 
37 | ![beast_on_examples](doc/beast_on_examples.gif)
38 | 
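A few other invocations that frequently come in handy are sketched below (all options are described in ``beast --help``; directory, filter and time unit values are just examples):

```bash
# list the executables that match the current filter, without running anything
beast --list

# only run executables whose file name matches a custom regex
beast -f ".*simple.*"

# run every benchmark found below ./build and plot CPU times in milliseconds
beast -d build -t ms

# draw benchmarks that use an argument list as lines instead of bars
beast --lineplot --xtitle "N"

# re-open the plot for the most recent run without executing anything again
beast plotlast
```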
39 | ## **Database Setup**
40 | 
41 | If you want to use ``beast``'s database-related functionality, you need to set up a ``mongoDB`` database, either by installing the Community Edition from [https://docs.mongodb.com/manual/administration/install-community/](https://docs.mongodb.com/manual/administration/install-community/) in your desired environment or by using the cloud-based solution [https://www.mongodb.com/cloud/atlas](https://www.mongodb.com/cloud/atlas).
42 | 
43 | Assuming a successful database setup, the only thing left to do is a little configuration via ``beast config``. Set the ``mongoDB`` URI, the database name and the collection name with the corresponding ``--set...`` options. Note: the collection does not have to exist yet; it will be created with the first push to it.
44 | 
45 | Finally, you should be able to push your most recently generated benchmark results via ``beast dbpush`` or to retrieve and plot previously pushed data with the ``beast dbplot`` command:
46 | 
47 | ![example_time_series](doc/example_time_series.png)
48 | 
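Assuming the database configured above is reachable, a typical session could look like the following sketch (URI, names and tag are placeholders you need to adapt):

```bash
# one-time configuration
beast config --set-db-uri "mongodb+srv://<user>:<password>@<cluster-url>"
beast config --set-db-name <database_name>
beast config --set-db-collection <collection_name>

# push the results of the last run, optionally with a tag attached
beast dbpush --tag "release-2.1.1"

# list the stored tags and plot the stored time series for matching executables
beast dblist
beast dbplot --fetchfilter ".*simple.*"
```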
49 | ## **Repository Benchmarking**
50 | 
51 | To run benchmarks on a certain commit range of a git repository, you need to provide the required information in a small `yaml` file.
52 | 
53 | Example for the `beast` repo:
54 | 
55 | ```yaml
56 | version: 1
57 | repo_path: # absolute or relative to cwd
58 | branch_name: master
59 | from_commit: a92f7b6f4e5da30908577b9109040987f6ca9bf6
60 | to_commit: 85347d6fd06acbd700be5237a94ca49486bb5e25
61 | build_commands: |
62 |   mkdir build
63 |   cd build && cmake ..
64 |   cd build && cmake --build . --target all
65 | benchmark_regex: .*benchmark[^.]*$
66 | ```
67 | 
68 | Adapt the `yaml` to your needs and set the path to it with `beast config --set-repocheck-yaml`. Run and plot the benchmarks with `beast repocheck` (check out `--help` for more details).
69 | 
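As a concrete sketch, using the example settings file that ships in ``doc/`` of this repository:

```bash
# point beast to the settings file and benchmark the configured commit range
beast config --set-repocheck-yaml doc/example_repocheck_settings.yaml
beast repocheck

# resume an interrupted run without deleting already collected per-commit results
beast repocheck --no-clean

# plot the collected results afterwards
beast repocheck --plot
```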
70 | ![beast_commit_range_benchmark](doc/commit_range_runtime.png)
71 | 
--------------------------------------------------------------------------------
/doc/beast_on_examples.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bytebat/beast/b7ccf1163099f84253b761162839c0a58398fc65/doc/beast_on_examples.gif
--------------------------------------------------------------------------------
/doc/commit_range_runtime.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bytebat/beast/b7ccf1163099f84253b761162839c0a58398fc65/doc/commit_range_runtime.png
--------------------------------------------------------------------------------
/doc/example_repocheck_settings.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | repo_path: .
3 | branch_name: master
4 | from_commit: a92f7b6f4e5da30908577b9109040987f6ca9bf6
5 | to_commit: 85347d6fd06acbd700be5237a94ca49486bb5e25
6 | build_commands: |
7 |   mkdir build
8 |   cd build && cmake ..
9 |   cd build && cmake --build . --target all
10 | benchmark_regex: .*benchmark[^.]*$
11 | 
12 | 
--------------------------------------------------------------------------------
/doc/example_time_series.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bytebat/beast/b7ccf1163099f84253b761162839c0a58398fc65/doc/example_time_series.png
--------------------------------------------------------------------------------
/example_benchmark/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.13.5)
2 | 
3 | project(findprime-benchmark LANGUAGES CXX)
4 | 
5 | ################
6 | # Dependencies #
7 | ################
8 | 
9 | find_package(benchmark REQUIRED)
10 | 
11 | set(BENCHMARK_ENABLE_TESTING OFF CACHE BOOL "Enable benchmark self tests." FORCE)
12 | 
13 | #########
14 | # Build #
15 | #########
16 | 
17 | macro(add_findprime_benchmark BENCHMARK_BASE_NAME MAIN_CPP_NAME NUMBER)
18 |     set(EXE_NAME "${BENCHMARK_BASE_NAME}-${MAIN_CPP_NAME}-${NUMBER}")
19 |     add_executable(${EXE_NAME})
20 | 
21 |     target_sources(${EXE_NAME}
22 |         PRIVATE
23 |             src/${MAIN_CPP_NAME}.cpp
24 |     )
25 | 
26 |     target_link_libraries(${EXE_NAME}
27 |         PRIVATE
28 |             benchmark::benchmark
29 |     )
30 | 
31 |     target_compile_definitions(${EXE_NAME}
32 |         PRIVATE
33 |             N_VALUE=${NUMBER}
34 |     )
35 | endmacro()
36 | 
37 | add_findprime_benchmark(${PROJECT_NAME} "simple" 1000)
38 | add_findprime_benchmark(${PROJECT_NAME} "simple" 1250)
39 | add_findprime_benchmark(${PROJECT_NAME} "simple" 1500)
40 | add_findprime_benchmark(${PROJECT_NAME} "with_arg_list" 1000)
41 | 
--------------------------------------------------------------------------------
/example_benchmark/src/simple.cpp:
--------------------------------------------------------------------------------
1 | #include <benchmark/benchmark.h>
2 | #include <stdlib.h>
3 | 
4 | #include <iostream>
5 | 
6 | using namespace std;
7 | 
8 | bool isPrime(int number) {
9 |     int counter = 0;
10 |     for (int j = 2; j < number; j++) {
11 |         if (number % j == 0) {
12 |             counter = 1;
13 |             break;
14 |         }
15 |     }
16 |     return (counter == 0 ? true : false);
17 | }
18 | 
19 | int calcNthPrime() {
20 |     int n = N_VALUE;
21 |     int num = 1, count = 0;
22 | 
23 |     while (true) {
24 |         num++;
25 |         if (isPrime(num)) {
26 |             count++;
27 |         }
28 |         if (count == n) {
29 |             cout << n << "th prime number is " << num << ".\n";
30 |             break;
31 |         }
32 |     }
33 |     return 0;
34 | }
35 | 
36 | static void calcNthPrimeBenchmark(benchmark::State& state) {
37 |     for (auto _ : state) {
38 |         calcNthPrime();
39 |     }
40 | }
41 | 
42 | static void calcNthPrimeBenchmark_x2(benchmark::State& state) {
43 |     for (auto _ : state) {
44 |         calcNthPrime();
45 |         calcNthPrime();
46 |     }
47 | }
48 | 
49 | BENCHMARK(calcNthPrimeBenchmark);
50 | BENCHMARK(calcNthPrimeBenchmark_x2);
51 | 
52 | BENCHMARK_MAIN();
53 | 
--------------------------------------------------------------------------------
/example_benchmark/src/with_arg_list.cpp:
--------------------------------------------------------------------------------
1 | #include <benchmark/benchmark.h>
2 | #include <stdlib.h>
3 | 
4 | #include <iostream>
5 | 
6 | using namespace std;
7 | 
8 | bool isPrime(int number) {
9 |     int counter = 0;
10 |     for (int j = 2; j < number; j++) {
11 |         if (number % j == 0) {
12 |             counter = 1;
13 |             break;
14 |         }
15 |     }
16 |     return (counter == 0 ? true : false);
17 | }
18 | 
19 | int calcNthPrime() {
20 |     int n = N_VALUE;
21 |     int num = 1, count = 0;
22 | 
23 |     while (true) {
24 |         num++;
25 |         if (isPrime(num)) {
26 |             count++;
27 |         }
28 |         if (count == n) {
29 |             cout << n << "th prime number is " << num << ".\n";
30 |             break;
31 |         }
32 |     }
33 |     return 0;
34 | }
35 | 
36 | static void calcNthPrimeBenchmark(benchmark::State& state) {
37 |     for (auto _ : state) {
38 |         for (int i = 0; i < state.range(0); ++i) {
39 |             calcNthPrime();
40 |         }
41 |     }
42 | }
43 | 
44 | static void calcNthPrimeBenchmark_x2(benchmark::State& state) {
45 |     for (auto _ : state) {
46 |         for (int i = 0; i < state.range(0); ++i) {
47 |             calcNthPrime();
48 |             calcNthPrime();
49 |         }
50 |     }
51 | }
52 | 
53 | BENCHMARK(calcNthPrimeBenchmark)->Arg(2)->Arg(4)->Arg(6);
54 | BENCHMARK(calcNthPrimeBenchmark_x2)->Arg(2)->Arg(4)->Arg(6);
55 | 
56 | BENCHMARK_MAIN();
57 | 
--------------------------------------------------------------------------------
/src/config.rs:
--------------------------------------------------------------------------------
1 | use clap::crate_name;
2 | use colored::Colorize;
3 | use preferences::{AppInfo, Preferences, PreferencesMap};
4 | 
5 | pub const APP_INFO: AppInfo = AppInfo {
6 |     name: crate_name!(),
7 |     author: "beastuser",
8 | };
9 | 
10 | const DATABASE_CONFIG_PATH: &str = "preferences/mongodb";
11 | const DATABASE_URI_KEY: &str = "url";
12 | const DATABASE_NAME_KEY: &str = "database_name";
13 | const DATABASE_BENCHMARK_COLLECTION_KEY: &str = "collection_name";
14 | 
15 | const GIT_CONFIG_PATH: &str = "preferences/git";
16 | const GIT_YAML_PATH_KEY: &str = "repocheck_yaml_path";
17 | 
18 | pub struct AppConfig {
19 |     db_config: PreferencesMap,
20 |     git_config: PreferencesMap,
21 | }
22 | 
23 | impl AppConfig {
24 |     pub fn init() -> AppConfig {
25 |         let loaded_db_config =
26 |             match PreferencesMap::<String>::load(&APP_INFO, &DATABASE_CONFIG_PATH) {
27 |                 Ok(cfg) => cfg,
28 |                 Err(_) => {
29 |                     // Set default config and return it
30 |                     let mut default_cfg: PreferencesMap = PreferencesMap::new();
31 |                     default_cfg.insert(DATABASE_URI_KEY.into(), "".into());
32 |                     default_cfg.insert(DATABASE_NAME_KEY.into(), "".into());
33 |                     default_cfg
34 |                 }
35 |             };
36 | 
37 |         let loaded_git_config = match PreferencesMap::<String>::load(&APP_INFO, &GIT_CONFIG_PATH) {
38 |             Ok(cfg) => cfg,
39 |             Err(_) => {
40 |                 // Set default config and return it
41 |                 let mut default_cfg: PreferencesMap = PreferencesMap::new();
42 |                 default_cfg.insert(GIT_YAML_PATH_KEY.into(), "".into());
43 |                 default_cfg
44 |             }
45 |         };
46 | 
47 |         AppConfig {
48 |             db_config: loaded_db_config,
49 |             git_config: loaded_git_config,
50 |         }
51 |     }
52 | 
53 |     // Public helper functions
54 |     pub fn print(&self) {
55 |         println!("{}", "Currently loaded database config:".cyan());
56 |         for (key, value) in &self.db_config {
57 |             println!("{} : \"{}\"", key, value);
58 |         }
59 |         println!("\n{}", "Currently loaded repocheck config:".cyan());
60 |         for (key, value) in &self.git_config {
61 |             println!("{} : \"{}\"", key, value);
62 |         }
63 |     }
64 | 
65 |     pub fn is_db_config_set(&self) -> bool {
66 |         return !self.mongodb_uri().is_empty() && !self.mongodb_name().is_empty();
67 |     }
68 | 
69 |     // Config setter
70 |     pub fn set_mongodb_uri(&mut self, url: &String) {
71 |         self.set_db_config_value(DATABASE_URI_KEY, url);
72 |     }
73 | 
74 |     pub fn set_mongodb_name(&mut self, name: &String) {
75 |         self.set_db_config_value(DATABASE_NAME_KEY, name);
76 |     }
77 | 
78 |     pub fn set_mongodb_collection(&mut self, name: &String) {
79 | 
self.set_db_config_value(DATABASE_BENCHMARK_COLLECTION_KEY, name); 80 | } 81 | 82 | pub fn set_repocheck_config_yaml(&mut self, repo_url: &String) { 83 | self.set_git_config_value(GIT_YAML_PATH_KEY, repo_url); 84 | } 85 | 86 | // Config getter 87 | pub fn mongodb_uri(&self) -> &String { 88 | self.get_db_config_value(DATABASE_URI_KEY) 89 | } 90 | 91 | pub fn mongodb_name(&self) -> &String { 92 | self.get_db_config_value(DATABASE_NAME_KEY) 93 | } 94 | 95 | pub fn mongodb_collection(&self) -> &String { 96 | self.get_db_config_value(DATABASE_BENCHMARK_COLLECTION_KEY) 97 | } 98 | 99 | pub fn repocheck_config_yaml(&self) -> &String { 100 | self.get_git_config_value(GIT_YAML_PATH_KEY) 101 | } 102 | 103 | // Private helper functions 104 | fn set_db_config_value(&mut self, key: &str, value: &str) { 105 | self.db_config.insert(key.into(), value.into()); 106 | self.db_config 107 | .save(&APP_INFO, DATABASE_CONFIG_PATH) 108 | .expect("Failed to save new default db config!"); 109 | println!("Config successfully saved: {:?}", self.db_config); 110 | } 111 | 112 | fn get_db_config_value(&self, key: &str) -> &String { 113 | self.db_config 114 | .get(key) 115 | .expect(&format!("Can't retrieve config value for key '{}'!", key)) 116 | } 117 | 118 | fn set_git_config_value(&mut self, key: &str, value: &str) { 119 | self.git_config.insert(key.into(), value.into()); 120 | self.git_config 121 | .save(&APP_INFO, GIT_CONFIG_PATH) 122 | .expect("Failed to save new default git config!"); 123 | println!("Config successfully saved: {:?}", self.git_config); 124 | } 125 | 126 | fn get_git_config_value(&self, key: &str) -> &String { 127 | self.git_config 128 | .get(key) 129 | .expect(&format!("Can't retrieve config value for key '{}'!", key)) 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/database.rs: -------------------------------------------------------------------------------- 1 | use crate::config::*; 2 | use crate::parse::*; 3 | 4 | use mongodb::bson; 5 | use mongodb::bson::{Bson, Regex}; 6 | use mongodb::sync::{Client, Collection}; 7 | use std::path::PathBuf; 8 | 9 | #[derive(Clone, Debug)] 10 | pub struct DataBase { 11 | pub client: Client, 12 | pub dbname: String, 13 | pub collection: String, 14 | } 15 | 16 | pub enum EntryFilter { 17 | ExeName(String), 18 | //Tag(String), 19 | //All(String, String), 20 | } 21 | 22 | impl DataBase { 23 | pub fn init(config: &AppConfig) -> DataBase { 24 | let mongodb_uri = config.mongodb_uri(); 25 | let mongodb_name = config.mongodb_name(); 26 | let mongodb_collection = config.mongodb_collection(); 27 | 28 | let client = Client::with_uri_str(mongodb_uri) 29 | .expect(&format!("Invalid database uri: {}.", mongodb_uri)); 30 | 31 | println!("Checking database connection ..."); 32 | 33 | client 34 | .database(mongodb_name) 35 | .run_command(bson::doc! 
{"ping": 1}, None)
36 |             .expect("Could not connect to database!");
37 | 
38 |         println!("Connected successfully!");
39 | 
40 |         Self {
41 |             client: client,
42 |             dbname: mongodb_name.to_string(),
43 |             collection: mongodb_collection.to_string(),
44 |         }
45 |     }
46 | 
47 |     pub fn push_last_results(&self, tag: Option<String>) {
48 |         let tag_value = tag.unwrap_or("".to_string());
49 |         let benchmark_collection = self.benchmark_collection();
50 | 
51 |         let cumulated_results = parse_cumulated_benchmark_file();
52 |         let mut docs = vec![];
53 |         for result in cumulated_results {
54 |             let result_bson = bson::to_bson(&result).unwrap();
55 |             let exe_name = exe_name(&result.context.executable);
56 |             docs.push(bson::doc! {"exe_name": Bson::String(exe_name), "tag": Bson::String(tag_value.clone()), "results": result_bson});
57 |         }
58 | 
59 |         benchmark_collection
60 |             .insert_many(docs, None)
61 |             .expect("Could not insert benchmark results in database collection!");
62 |     }
63 | 
64 |     pub fn fetch(&self, entry_filter: EntryFilter) -> Vec<DataBaseEntry> {
65 |         let benchmark_collection = self.benchmark_collection();
66 | 
67 |         let filter = match entry_filter {
68 |             EntryFilter::ExeName(reg_expr) => {
69 |                 bson::doc! { "exe_name": Regex{pattern: reg_expr, options: String::new()} }
70 |             }
71 |             //EntryFilter::Tag(tag) => bson::doc! { "tag": tag },
72 |             //EntryFilter::All(exe, tag) => bson::doc! { "exe_name": exe, "tag": tag },
73 |         };
74 |         println!("Using mongodb query: {}", filter);
75 | 
76 |         let cursor = benchmark_collection
77 |             .find(filter, None)
78 |             .expect("Could not fetch results from database!");
79 | 
80 |         let mut fetched_results = vec![];
81 | 
82 |         for result in cursor {
83 |             match result {
84 |                 Ok(document) => {
85 |                     let entry: DataBaseEntry = bson::from_bson(Bson::Document(document))
86 |                         .expect("Could not deserialize database entry!");
87 |                     fetched_results.push(entry);
88 |                 }
89 |                 Err(e) => panic!(e),
90 |             }
91 |         }
92 | 
93 |         fetched_results
94 |     }
95 | 
96 |     pub fn list_tags(&self) -> Vec<String> {
97 |         let benchmark_collection = self.benchmark_collection();
98 | 
99 |         let tags = benchmark_collection
100 |             .distinct(
101 |                 "tag",
102 |                 bson::doc! {"tag": {"$exists" : true, "$ne" : ""} },
103 |                 None,
104 |             )
105 |             .expect("Could not retrieve list of tags!");
106 | 
107 |         tags.iter()
108 |             .map(|tag| bson::from_bson(tag.clone()).unwrap())
109 |             .collect()
110 |     }
111 | 
112 |     fn benchmark_collection(&self) -> Collection {
113 |         self.client
114 |             .database(&self.dbname)
115 |             .collection(&self.collection)
116 |     }
117 | }
118 | 
119 | fn exe_name(pathbuf: &PathBuf) -> String {
120 |     pathbuf
121 |         .as_path()
122 |         .file_name()
123 |         .unwrap()
124 |         .to_str()
125 |         .expect("Could not convert executable path to valid string!")
126 |         .to_string()
127 | }
128 | 
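For reference, each document that `push_last_results` above inserts into the configured collection wraps one executable's parsed results together with the optional tag. The shape is roughly the following (all values are illustrative):

```json
{
  "exe_name": "findprime-benchmark-simple-1000",
  "tag": "nightly",
  "results": {
    "commit": "",
    "context": {
      "date": "2021-01-17T12:30:00+01:00",
      "executable": "/path/to/findprime-benchmark-simple-1000",
      "num_cpus": 8,
      "mhz_per_cpu": 2800
    },
    "benchmarks": [
      { "name": "calcNthPrimeBenchmark", "iterations": 1234, "real_time": 520.1, "cpu_time": 519.7, "time_unit": "us" }
    ]
  }
}
```

The `exe_name` field is what the regex passed to `beast dbplot --fetchfilter` is matched against.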
--------------------------------------------------------------------------------
/src/exec.rs:
--------------------------------------------------------------------------------
1 | use crate::parse::*;
2 | 
3 | use indicatif::{ProgressBar, ProgressStyle};
4 | use std::env;
5 | use std::fs::remove_file;
6 | use std::path::PathBuf;
7 | use std::process::Command;
8 | 
9 | pub fn execute_benchmarks<PathList: AsRef<Vec<PathBuf>>>(
10 |     exe_paths: PathList,
11 | ) -> Vec<BenchmarkResults> {
12 |     let exe_count = exe_paths.as_ref().len() as u64;
13 |     let bar = ProgressBar::new(exe_count);
14 |     let sty = ProgressStyle::default_bar()
15 |         .template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}");
16 |     //.progress_chars("##-");
17 |     bar.set_style(sty);
18 | 
19 |     let result_file_path = result_file_path();
20 | 
21 |     let result_file_path_str = result_file_path
22 |         .to_str()
23 |         .expect("Could not convert benchmark result file path to str!");
24 | 
25 |     let mut bm_all_results: Vec<BenchmarkResults> = Vec::new();
26 | 
27 |     for exe_path in exe_paths.as_ref() {
28 |         let exe_name = exe_path.as_path().file_name().unwrap();
29 |         bar.set_message(&format!(
30 |             "Executing benchmark \"{}\"...",
31 |             exe_name.to_string_lossy()
32 |         ));
33 | 
34 |         let benchmark_output = Command::new(exe_path)
35 |             .arg(format!("--benchmark_out={}", result_file_path_str))
36 |             .arg("--benchmark_out_format=json")
37 |             .output()
38 |             .expect("failed to execute process");
39 | 
40 |         assert!(
41 |             benchmark_output.status.success(),
42 |             "{} returned with exit code {}!",
43 |             exe_name.to_string_lossy(),
44 |             benchmark_output.status
45 |         );
46 | 
47 |         let cur_bm_results = parse_single_benchmark_file(&result_file_path);
48 | 
49 |         remove_file(result_file_path.as_path()).expect(&format!(
50 |             "Unable to remove benchmark result file \"{}\"!",
51 |             result_file_path_str
52 |         ));
53 | 
54 |         bm_all_results.push(cur_bm_results);
55 | 
56 |         bar.inc(1);
57 |     }
58 |     bar.finish();
59 | 
60 |     println!("Benchmark execution finished successfully!");
61 | 
62 |     bm_all_results
63 | }
64 | 
65 | fn result_file_path() -> PathBuf {
66 |     let mut temp_dir = env::temp_dir();
67 |     temp_dir.push("beast_temp_benchmarkoutput.json");
68 |     return temp_dir;
69 | }
70 | 
--------------------------------------------------------------------------------
/src/find.rs:
--------------------------------------------------------------------------------
1 | use is_executable::IsExecutable;
2 | use regex::Regex;
3 | use std::path::{Path, PathBuf};
4 | use walkdir::{DirEntry, Error, WalkDir};
5 | 
6 | pub fn find_executables<P: AsRef<Path>>(root: P, exe_pattern: &str) -> Vec<PathBuf> {
7 |     let re = Regex::new(exe_pattern).expect(&format!("Could not compile regex: {}", exe_pattern));
8 | 
9 |     WalkDir::new(root)
10 |         .into_iter()
11 |         .filter_map(|entry: Result<DirEntry, Error>| entry.ok())
12 |         .filter(|entry| entry.file_type().is_file() && entry.path().is_executable())
13 |         .filter(|entry| {
14 |             re.is_match(
15 |                 entry
16 |                     .path()
17 |                     .file_name()
18 |                     .expect("This should
not happen!") 19 | .to_str() 20 | .expect("Conversion of filename to string failed!"), 21 | ) 22 | }) 23 | .map(|entry| entry.path().to_owned()) 24 | .collect() 25 | } 26 | -------------------------------------------------------------------------------- /src/logger.rs: -------------------------------------------------------------------------------- 1 | use colored::*; 2 | 3 | pub fn error_and_exit(msg: &str, e: &dyn std::error::Error) -> ! { 4 | eprintln!("{} {} [{}]!", "ERROR:".red(), msg.red(), e); 5 | std::process::exit(1); 6 | } 7 | 8 | pub fn _warn(msg: &str, e: &dyn std::error::Error) { 9 | println!("{} {} [{}]!", "WARNING:".yellow(), msg.yellow(), e); 10 | } 11 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use clap::{crate_name, crate_version, App, Arg, ArgMatches, SubCommand}; 2 | use find::find_executables; 3 | use std::path::Path; 4 | 5 | mod config; 6 | mod database; 7 | mod exec; 8 | mod find; 9 | mod logger; 10 | mod parse; 11 | mod plot; 12 | mod repocheck; 13 | 14 | use crate::config::*; 15 | use crate::database::*; 16 | use crate::exec::*; 17 | use crate::logger::*; 18 | use crate::parse::*; 19 | use crate::plot::*; 20 | 21 | fn main() -> Result<(), std::io::Error> { 22 | let matches = App::new(crate_name!()) 23 | .version(crate_version!()) 24 | .about("(be)nchmark (a)nalysis and (s)ummary (t)ool") 25 | .arg(Arg::from_usage( 26 | "[rootdir], -d, --dir=[DIR] 'Root directory to use for benchmark search'", 27 | )) 28 | .arg( 29 | Arg::from_usage( 30 | "[filter], -f, --filter=[REGEXP] 'Only run benchmark executables matching the regex pattern'", 31 | ) 32 | .default_value(".*benchmark[^.]*$"), 33 | ) 34 | .arg( 35 | Arg::with_name("list") 36 | .help("List benchmark executables found by the current regex (see '-f')") 37 | .short("l") 38 | .long("list") 39 | //.requires("filter") 40 | ) 41 | .arg( 42 | Arg::from_usage( 43 | "[timeunit], -t, --timeunit=[TIMEUNIT] 'Time unit for plots (possible values are: ms, us, ns)'", 44 | ) 45 | .default_value("us"), 46 | ) 47 | .arg( 48 | Arg::with_name("lineplot") 49 | .help("Create lineplot instead of bar plot for benchmarks with argument list") 50 | .long("lineplot") 51 | ) 52 | .arg( 53 | Arg::from_usage( 54 | "[xtitle], --xtitle=[STRING] 'Line plot x-axis title to display'", 55 | ) 56 | .default_value("N"), 57 | ) 58 | .arg( 59 | Arg::with_name("noplot") 60 | .help("Do not create plot for benchmark results, e.g. when using beast in scripts") 61 | .long("noplot") 62 | ) 63 | .subcommand(SubCommand::with_name("config") 64 | .about("Handle beast's configuration, e.g. 
the mongodb access or the git settings") 65 | .arg( 66 | Arg::from_usage( 67 | "[mongodb_uri], --set-db-uri=[URI] 'Sets a mongodb URI for push/fetch of benchmark results'", 68 | ), 69 | ) 70 | .arg( 71 | Arg::from_usage( 72 | "[mongodb_dbname], --set-db-name=[NAME] 'Sets a mongodb database to work with'", 73 | ), 74 | ) 75 | .arg( 76 | Arg::from_usage( 77 | "[mongodb_collection], --set-db-collection=[COLLECTION] 'Sets a mongodb collection to work with'", 78 | ), 79 | ) 80 | .arg( 81 | Arg::from_usage( 82 | "[repocheck_yaml_path], --set-repocheck-yaml=[PATH] 'Sets path to the repocheck settings yaml file'", 83 | ), 84 | ) 85 | ) 86 | .subcommand(SubCommand::with_name("plotlast") 87 | .about("Plots benchmark results from last run \n\ 88 | Note: Supports the '-t' option after main command to plot with desired time unit.") 89 | ) 90 | .subcommand(SubCommand::with_name("dbpush") 91 | .about("Pushes previously exported benchmark results to the configured database") 92 | .arg( 93 | Arg::from_usage( 94 | "[tag], --tag=[TAGNAME] 'Adds a tag to the pushed results.'", 95 | ), 96 | ) 97 | ) 98 | .subcommand(SubCommand::with_name("dbplot") 99 | .about("Fetches all benchmark results from the configured database collection and plot them as time series \n\ 100 | Note: Supports the '-t' option after main command to plot with desired time unit.") 101 | .arg( 102 | Arg::from_usage( 103 | "[fetchfilter], --fetchfilter=[REGEXP] 'Filters executables to plot with a mongodb compatible regexp'", 104 | ) 105 | .default_value(".*"), 106 | ) 107 | ) 108 | .subcommand(SubCommand::with_name("dblist") 109 | .about("Lists distinct tags in current benchmark collection") 110 | ) 111 | .subcommand(SubCommand::with_name("repocheck") 112 | .about("Runs beast for the commit range previously specified in the yaml set via 'beast config'") 113 | .arg( 114 | Arg::with_name("noclean") 115 | .help("Run without cleaning previous results. 
Run will continue with commits without existing results") 116 | .long("no-clean") 117 | ) 118 | .arg( 119 | Arg::with_name("plot") 120 | .help("Plot repocheck results from previous run, configured in the according yaml") 121 | .long("plot") 122 | ) 123 | ) 124 | .get_matches(); 125 | 126 | let mut config = AppConfig::init(); 127 | 128 | // Handle subcommands 129 | handle_config_commands(&matches, &mut config); 130 | handle_database_commands(&matches, &config); 131 | handle_repocheck_commands(&matches, &config); 132 | 133 | // Parse main options 134 | let root_dir = match matches.value_of("rootdir") { 135 | Some(valid_val) => Path::new(valid_val).to_path_buf(), 136 | _ => match std::env::current_dir() { 137 | Ok(path_buf) => path_buf, 138 | Err(err) => panic!("Can't retrieve current directory: {:?}", err), 139 | }, 140 | }; 141 | 142 | let filter_pattern = matches.value_of("filter").unwrap(); 143 | let plot_time_unit = matches.value_of("timeunit").unwrap(); 144 | 145 | // Plot last results 146 | if let Some(ref _matches) = matches.subcommand_matches("plotlast") { 147 | let last_results = parse::parse_cumulated_benchmark_file(); 148 | if matches.is_present("lineplot") { 149 | let x_title = matches.value_of("xtitle").unwrap(); 150 | plot_all_as_lines(&last_results, plot_time_unit, x_title); 151 | } else { 152 | plot_all_as_bars(&last_results, plot_time_unit); 153 | } 154 | return Ok(()); 155 | } 156 | 157 | // Benchmark execution handling 158 | println!("Root scan directory: {:?}", root_dir.as_os_str()); 159 | 160 | let mut benchmark_paths = find_executables(root_dir, filter_pattern); 161 | 162 | if benchmark_paths.is_empty() { 163 | println!("No benchmarks found to run!"); 164 | return Ok(()); 165 | } 166 | 167 | benchmark_paths.sort(); 168 | 169 | if matches.is_present("list") { 170 | println!("Found benchmark executables:\n"); 171 | println!( 172 | "{}", 173 | benchmark_paths 174 | .iter() 175 | .fold(String::new(), |total_str, arg| total_str 176 | + &arg.as_path().to_string_lossy() 177 | + "\n") 178 | ); 179 | return Ok(()); 180 | } 181 | 182 | let benchmark_results = execute_benchmarks(benchmark_paths); 183 | export_cumulated_results(&benchmark_results); 184 | 185 | if !matches.is_present("noplot") { 186 | if matches.is_present("lineplot") { 187 | let x_title = matches.value_of("xtitle").unwrap(); 188 | plot_all_as_lines(&benchmark_results, plot_time_unit, x_title); 189 | } else { 190 | plot_all_as_bars(&benchmark_results, plot_time_unit); 191 | } 192 | } 193 | 194 | return Ok(()); 195 | } 196 | 197 | fn handle_config_commands(matches: &ArgMatches, config: &mut AppConfig) { 198 | if let Some(ref matches) = matches.subcommand_matches("config") { 199 | config.print(); 200 | match matches.value_of("mongodb_uri") { 201 | Some(provided_url) => config.set_mongodb_uri(&provided_url.to_string()), 202 | None => {} 203 | } 204 | match matches.value_of("mongodb_dbname") { 205 | Some(provided_mongodb_name) => { 206 | config.set_mongodb_name(&provided_mongodb_name.to_string()) 207 | } 208 | None => {} 209 | } 210 | match matches.value_of("mongodb_collection") { 211 | Some(provided_mongodb_collection) => { 212 | config.set_mongodb_collection(&provided_mongodb_collection.to_string()) 213 | } 214 | None => {} 215 | } 216 | match matches.value_of("repocheck_yaml_path") { 217 | Some(yaml_path) => { 218 | match std::fs::canonicalize(yaml_path) { 219 | Ok(path) => config 220 | .set_repocheck_config_yaml(&path.as_path().to_string_lossy().to_string()), 221 | Err(e) => error_and_exit( 222 | &format!("Path '{}' 
does not exist or can't be read", yaml_path), 223 | &e, 224 | ), 225 | }; 226 | } 227 | None => {} 228 | } 229 | std::process::exit(0); 230 | } 231 | } 232 | 233 | fn handle_database_commands(matches: &ArgMatches, config: &AppConfig) { 234 | let plot_time_unit = matches.value_of("timeunit").unwrap(); 235 | 236 | if let Some(ref submatches) = matches.subcommand_matches("dbpush") { 237 | if config.is_db_config_set() { 238 | let db = DataBase::init(&config); 239 | let tag_option = submatches.value_of("tag").map(String::from); 240 | db.push_last_results(tag_option); 241 | } else { 242 | print_config_not_set(); 243 | } 244 | std::process::exit(0); 245 | } 246 | if let Some(ref submatches) = matches.subcommand_matches("dbplot") { 247 | if config.is_db_config_set() { 248 | let filter_pattern = submatches.value_of("fetchfilter").unwrap_or(".*"); 249 | let db = DataBase::init(&config); 250 | 251 | let results = db.fetch(EntryFilter::ExeName(filter_pattern.to_string())); 252 | //TODO: add support for "tag" + "both" 253 | 254 | if results.is_empty() { 255 | println!("Did not find any matching results. Nothing to plot!"); 256 | std::process::exit(0); 257 | } 258 | 259 | plot_db_entries(&results, plot_time_unit); 260 | } else { 261 | print_config_not_set(); 262 | } 263 | std::process::exit(0); 264 | } 265 | if let Some(ref _submatches) = matches.subcommand_matches("dblist") { 266 | if config.is_db_config_set() { 267 | let db = DataBase::init(&config); 268 | let tags = db.list_tags(); 269 | print!("\nFound tags:\n{:?}\n", tags); 270 | } 271 | std::process::exit(0); 272 | } 273 | } 274 | 275 | fn handle_repocheck_commands(matches: &ArgMatches, config: &AppConfig) { 276 | let plot_time_unit = matches.value_of("timeunit").unwrap(); 277 | 278 | if let Some(ref submatches) = matches.subcommand_matches("repocheck") { 279 | let yaml_path = Path::new(config.repocheck_config_yaml()); 280 | let mut settings = repocheck::parse(yaml_path); 281 | 282 | if submatches.is_present("noclean") { 283 | settings.no_clean = Some(true); 284 | } 285 | 286 | if submatches.is_present("plot") { 287 | let results = repocheck::collect_repocheck_results(&settings); 288 | plot_all_as_commit_series(&results, plot_time_unit); 289 | std::process::exit(0); 290 | } 291 | 292 | repocheck::run(&settings); 293 | std::process::exit(0); 294 | } 295 | } 296 | 297 | fn print_config_not_set() { 298 | println!("database config is not yet set. 
Use 'beast config' for this.");
299 | }
300 | 
--------------------------------------------------------------------------------
/src/parse.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use serde_json::json;
3 | use std::env;
4 | use std::fs::File;
5 | use std::io::BufReader;
6 | use std::path::{Path, PathBuf};
7 | 
8 | const LAST_RESULTS_FILENAME: &str = "beast_temp_lastresults.json";
9 | 
10 | #[derive(Serialize, Deserialize, Debug)]
11 | pub struct DataBaseEntry {
12 |     pub exe_name: String,
13 |     pub tag: String,
14 |     pub results: BenchmarkResults,
15 | }
16 | 
17 | #[derive(Serialize, Deserialize, Debug)]
18 | pub struct BenchmarkResults {
19 |     pub commit: Option<String>,
20 |     pub context: BenchmarkContext,
21 |     pub benchmarks: Vec<BenchmarkResult>,
22 | }
23 | 
24 | #[derive(Serialize, Deserialize, Debug)]
25 | pub struct BenchmarkContext {
26 |     pub date: String,
27 |     pub executable: PathBuf,
28 |     pub num_cpus: i32,
29 |     pub mhz_per_cpu: i32,
30 | }
31 | 
32 | #[derive(Serialize, Deserialize, Debug)]
33 | pub struct BenchmarkResult {
34 |     pub name: String,
35 |     pub iterations: i32,
36 |     pub real_time: f64,
37 |     pub cpu_time: f64,
38 |     pub time_unit: Option<String>,
39 | }
40 | 
41 | pub fn parse_single_benchmark_file<P: AsRef<Path>>(file_path: P) -> BenchmarkResults {
42 |     let json = json_from_file(file_path);
43 |     let bm_context = json.get("context").unwrap();
44 |     let bm_list = json["benchmarks"].as_array().unwrap();
45 | 
46 |     let mut results = BenchmarkResults {
47 |         commit: Some("".to_string()),
48 |         context: serde_json::from_value(bm_context.clone()).unwrap(),
49 |         benchmarks: Vec::new(),
50 |     };
51 | 
52 |     for single_bm in bm_list {
53 |         let bm_result: BenchmarkResult = serde_json::from_value(single_bm.clone()).unwrap();
54 |         // println!("{:?}", bm_result);
55 |         results.benchmarks.push(bm_result);
56 |     }
57 | 
58 |     return results;
59 | }
60 | 
61 | pub fn parse_cumulated_benchmark_file() -> Vec<BenchmarkResults> {
62 |     let file_path = exported_results_file_path();
63 |     if !Path::exists(file_path.as_path()) {
64 |         println!("No benchmark result file found! Run 'beast' to create one!");
65 |         std::process::exit(0);
66 |     }
67 |     let cumulated_results = json_from_file(file_path);
68 |     return serde_json::from_value(cumulated_results)
69 |         .expect("Could not deserialize JsonValue from cumulated benchmark file!");
70 | }
71 | 
72 | pub fn export_cumulated_results(cumulated_results: &Vec<BenchmarkResults>) {
73 |     let export_file = exported_results_file_path();
74 |     let export_file_path_str = export_file.as_path().to_string_lossy();
75 | 
76 |     let f = File::create(&export_file)
77 |         .expect(&format!("Could not create file {}!", export_file_path_str));
78 |     let results_json_val = json!(*cumulated_results);
79 | 
80 |     serde_json::to_writer(&f, &results_json_val).expect(&format!(
81 |         "Could not write to file {}!",
82 |         export_file_path_str
83 |     ));
84 | }
85 | 
86 | pub fn export_results_to_file(results: &Vec<BenchmarkResults>, file_path: &Path) {
87 |     let f = File::create(file_path).expect(&format!(
88 |         "Could not create file {}!",
89 |         file_path.to_string_lossy()
90 |     ));
91 |     let results_json_val = json!(*results);
92 | 
93 |     serde_json::to_writer(&f, &results_json_val).expect(&format!(
94 |         "Could not write to file {}!",
95 |         file_path.to_string_lossy()
96 |     ));
97 | }
98 | 
99 | pub fn json_from_file<P: AsRef<Path>>(file_path: P) -> serde_json::Value {
100 |     let result_file = File::open(file_path).expect("Benchmark result file not found!");
101 |     let reader = BufReader::new(result_file);
102 |     return serde_json::from_reader(reader).expect("JSON was not well-formatted!");
103 | }
104 | 
105 | fn exported_results_file_path() -> PathBuf {
106 |     let mut temp_dir = env::temp_dir();
107 |     temp_dir.push(LAST_RESULTS_FILENAME);
108 |     return temp_dir;
109 | }
110 | 
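For reference, the per-run files consumed by `parse_single_benchmark_file` above are plain Google Benchmark JSON exports (produced with `--benchmark_out`, see `exec.rs`). An abridged example with illustrative values is shown below; fields not declared in the structs above are simply ignored during deserialization:

```json
{
  "context": {
    "date": "2021-01-17T12:30:00+01:00",
    "executable": "./findprime-benchmark-with_arg_list-1000",
    "num_cpus": 8,
    "mhz_per_cpu": 2800
  },
  "benchmarks": [
    {
      "name": "calcNthPrimeBenchmark/2",
      "iterations": 1234,
      "real_time": 512.3,
      "cpu_time": 510.8,
      "time_unit": "us"
    }
  ]
}
```

Benchmarks registered with an argument list show up with names like `calcNthPrimeBenchmark/2`; this is what `plot_all_as_lines` further below splits on `/` to derive the x-axis values.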
--------------------------------------------------------------------------------
/src/plot.rs:
--------------------------------------------------------------------------------
1 | use crate::parse::*;
2 | 
3 | use plotly::common::{DashType, Line, LineShape, Mode, Title};
4 | use plotly::layout::{Axis, BarMode, Layout};
5 | use plotly::{Bar, Plot, Scatter};
6 | use std::collections::HashMap;
7 | use std::time::Duration;
8 | 
9 | pub fn plot_all_as_bars(all_results: &Vec<BenchmarkResults>, plot_time_unit: &str) {
10 |     // use first benchmark for cpu info as all results are retrieved on the same machine
11 |     let plot_title = format!(
12 |         "CPU count: {}, MHz/CPU: {}",
13 |         all_results[0].context.num_cpus, all_results[0].context.mhz_per_cpu
14 |     )
15 |     .to_string();
16 | 
17 |     let y_title = format!("CPU runtime [{}]", plot_time_unit).to_string();
18 |     let mut plot = Plot::new();
19 | 
20 |     let layout = Layout::new()
21 |         .title(Title::from(plot_title.as_str()))
22 |         .bar_mode(BarMode::Group)
23 |         .bar_group_gap(0.1)
24 |         .x_axis(Axis::new().auto_margin(true))
25 |         .y_axis(Axis::new().title(Title::from(y_title.as_str())));
26 | 
27 |     plot.set_layout(layout);
28 | 
29 |     for bm_results in all_results {
30 |         let mut sub_bm_names = vec![];
31 |         let mut sub_bm_cpu_times = vec![];
32 |         let bm_results_name = bm_results.context.executable.as_path().file_name().unwrap();
33 | 
34 |         // collect sub benchmarks results for trace
35 |         for sub_bm_res in &bm_results.benchmarks {
36 |             let sub_bm_duration =
37 |                 from_benchmark_time(sub_bm_res.time_unit.as_ref(), sub_bm_res.cpu_time as u64);
38 |             let sub_bm_converted_cpu_time = convert_time_to_unit(sub_bm_duration, plot_time_unit);
39 | 
40 |             sub_bm_names.push(sub_bm_res.name.clone());
41 |             sub_bm_cpu_times.push(sub_bm_converted_cpu_time);
42 |         }
43 | 
44 |         plot.add_trace(
45 |             Bar::new(sub_bm_names, sub_bm_cpu_times).name(&bm_results_name.to_string_lossy()),
46 |         );
47 |     }
48 | 
49 |     plot.show();
50 | }
51 | 
52 | pub fn plot_all_as_lines(all_results: &Vec<BenchmarkResults>, plot_time_unit: &str, x_title: &str) {
53 |     // use first benchmark for cpu info as all results are retrieved on the same machine
54 |     let plot_title = format!(
55 |         "CPU count: {}, MHz/CPU: {}",
56 |         all_results[0].context.num_cpus, all_results[0].context.mhz_per_cpu
57 |     )
58 |     .to_string();
59 |     let y_title = format!("CPU runtime [{}]", plot_time_unit).to_string();
60 |     let mut plot = Plot::new();
61 | 
62 |     for bm_results in all_results {
63 |         let mut x_values: HashMap<String, Vec<i64>> = HashMap::new();
64 |         let mut y_values: HashMap<String, Vec<f64>> = HashMap::new();
65 |         // let bm_results_name = bm_results.context.executable.as_path().file_name().unwrap();
66 | 
67 |         // collect sub benchmarks results for traces
68 |         for sub_bm_res in &bm_results.benchmarks {
69 |             let splitted_name = sub_bm_res.name.split("/");
70 |             let splitted_name_vec = splitted_name.collect::<Vec<&str>>();
71 |             let sub_bm_name = splitted_name_vec[0];
72 |             let sub_bm_x_val = splitted_name_vec[1].parse::<i64>().unwrap();
73 | 
74 |             x_values
75 |                 .entry(sub_bm_name.to_string())
76 |                 .or_insert(Vec::new())
77 |                 .push(sub_bm_x_val);
78 | 
79 |             let sub_bm_duration =
80 |                 from_benchmark_time(sub_bm_res.time_unit.as_ref(), sub_bm_res.cpu_time as u64);
81 |             let sub_bm_converted_cpu_time = convert_time_to_unit(sub_bm_duration, plot_time_unit);
82 | 
83 |             y_values
84 |                 .entry(sub_bm_name.to_string())
85 |                 .or_insert(Vec::new())
86 |                 .push(sub_bm_converted_cpu_time);
87 |         }
88 | 
89 |         for bm_name in y_values.keys() {
90 |             let trace = Scatter::new(x_values[bm_name].to_owned(), y_values[bm_name].to_owned())
91 |                 .mode(Mode::LinesMarkers)
92 |                 .name(bm_name)
93 |                 .line(Line::new().dash(DashType::Dash));
94 |             plot.add_trace(trace);
95 |         }
96 |     }
97 | 
98 |     let layout = Layout::new()
99 |         .title(Title::from(plot_title.as_str()))
100 |         .x_axis(Axis::new().title(Title::from(x_title)))
101 |         .y_axis(Axis::new().title(Title::from(y_title.as_str())));
102 |     // TODO: create common tick list from all x-value-vectors (use tick_mode(TickMode::Array).tick_values())
103 | 
104 |     plot.set_layout(layout);
105 |     plot.show();
106 | }
107 | 
108 | // TODO: refine plot options, e.g. include means and variances in chart
109 | pub fn plot_all_as_commit_series(results: &Vec<BenchmarkResults>, plot_time_unit: &str) {
110 |     let mut xlabels: HashMap<String, Vec<String>> = HashMap::new();
111 |     let mut cpu_times: HashMap<String, Vec<f64>> = HashMap::new();
112 |     let mut tags: HashMap<String, Vec<String>> = HashMap::new();
113 | 
114 |     // collect benchmark data per commit for each "exename_benchmarkname"
115 |     for result in results {
116 |         for benchmark in &result.benchmarks {
117 |             let exe_name = result.context.executable.file_name().unwrap();
118 |             let trace_name = exe_name.to_string_lossy().to_owned() + "_" + benchmark.name.as_str();
119 |             let trace_name = trace_name.to_string();
120 | 
121 |             // build current xlabel
122 |             let commit_id = result.commit.as_ref().unwrap();
123 |             let xlabel = build_label(commit_id.as_str(), "");
124 | 
125 |             // set cpu_time based on time unit for plot
126 |             let cpu_time_as_duration =
127 |                 from_benchmark_time(benchmark.time_unit.as_ref(), benchmark.cpu_time as u64);
128 |             let converted_cpu_time = convert_time_to_unit(cpu_time_as_duration, plot_time_unit);
129 | 
130 |             xlabels
131 |                 .entry(trace_name.clone())
132 |                 .or_insert(Vec::new())
133 |                 .push(xlabel);
134 |             cpu_times
135 |                 .entry(trace_name.clone())
136 |                 .or_insert(Vec::new())
137 |                 .push(converted_cpu_time);
138 |             tags.entry(trace_name.clone())
139 |                 .or_insert(Vec::new())
140 |                 .push(commit_id.clone());
141 |         }
142 |     }
143 | 
144 |     // create plot
145 |     let y_title = format!("CPU runtime [{}]", plot_time_unit).to_string();
146 | 
147 |     let layout = Layout::new()
148 |         .title(Title::from("Benchmark results over time"))
149 |         .x_axis(Axis::new().title(Title::from("Commit")).auto_margin(true))
150 |         .y_axis(Axis::new().title(Title::from(y_title.as_str())));
151 | 
152 |     let mut plot = Plot::new();
153 | 
154 |     plot.set_layout(layout);
155 | 
156 |     for trace_name in xlabels.keys() {
157 |         let trace = Scatter::new(xlabels[trace_name].clone(), cpu_times[trace_name].clone())
158 |             .mode(Mode::LinesMarkers)
159 |             .name(trace_name)
160 |             .text_array(tags[trace_name].clone())
161 |             .line(Line::new().shape(LineShape::Hv));
162 | 
163 |         plot.add_trace(trace);
164 |     }
165 | 
166 |     plot.show();
167 | }
168 | 
169 | pub fn plot_db_entries(db_entries: &Vec<DataBaseEntry>, plot_time_unit: &str) {
170 |     let mut xlabels: HashMap<String, Vec<String>> = HashMap::new();
171 |     let mut cpu_times: HashMap<String, Vec<f64>> = HashMap::new();
172 |     let mut tags: HashMap<String, Vec<String>> = HashMap::new();
173 | 
174 |     // collect time series data for each "exename_benchmarkname"
175 |     for db_entry in db_entries {
176 |         for single_result in &db_entry.results.benchmarks {
177 |             let trace_name = db_entry.exe_name.clone() + "_" + single_result.name.as_str();
178 | 
179 |             // build current xlabel
180 |             let xlabel = build_label(db_entry.results.context.date.as_str(), "");
181 | 
182 |             // set cpu_time based on time unit for plot
183 |             let cpu_time_as_duration = from_benchmark_time(
184 |                 single_result.time_unit.as_ref(),
185 |                 single_result.cpu_time as u64,
186 |             );
187 |             let converted_cpu_time = convert_time_to_unit(cpu_time_as_duration, plot_time_unit);
188 | 
189 |             xlabels
190 |                 .entry(trace_name.clone())
191 |                 .or_insert(Vec::new())
192 |                 .push(xlabel);
193 |             cpu_times
194 |                 .entry(trace_name.clone())
195 |                 .or_insert(Vec::new())
196 |                 .push(converted_cpu_time);
197 |             tags.entry(trace_name.clone())
198 |                 .or_insert(Vec::new())
199 |                 .push(db_entry.tag.clone());
200 |         }
201 |     }
202 | 
203 |     // create plot
204 |     let y_title = format!("CPU runtime [{}]", plot_time_unit).to_string();
205 | 
206 |     let layout = Layout::new()
207 |         .title(Title::from("Benchmark results over time"))
208 |         .x_axis(Axis::new().title(Title::from("Date")).auto_margin(true))
209 |         .y_axis(Axis::new().title(Title::from(y_title.as_str())));
210 | 
211 |     let mut plot = Plot::new();
212 | 
213 |     plot.set_layout(layout);
214 | 
215 |     for trace_name in xlabels.keys() {
216 |         let trace = Scatter::new(xlabels[trace_name].clone(), cpu_times[trace_name].clone())
217 |             .mode(Mode::LinesMarkers)
218 |             .name(trace_name)
219 |             .text_array(tags[trace_name].clone())
220 |             .line(Line::new().shape(LineShape::Hv));
221 | 
222 |         plot.add_trace(trace);
223 |     }
224 | 
225 |     plot.show();
226 | }
227 | 
228 | fn from_benchmark_time(from_time_unit: Option<&String>, time: u64) -> Duration {
229 |     match from_time_unit {
230 |         Some(from_time_unit) => match from_time_unit.as_ref() {
231 |             "ns" => Duration::from_nanos(time),
232 |             "us" => Duration::from_micros(time),
233 |             "ms" => Duration::from_millis(time),
234 |             _ => panic!("Unknown time unit provided!"),
235 |         },
236 |         None => {
237 |             println!("No time unit was provided. Assuming ns!");
238 |             Duration::from_nanos(time)
239 |         }
240 |     }
241 | }
242 | 
243 | fn convert_time_to_unit(duration: Duration, time_unit: &str) -> f64 {
244 |     let converted_time = match time_unit {
245 |         "ns" => duration.as_nanos(),
246 |         "us" => duration.as_micros(),
247 |         "ms" => duration.as_millis(),
248 |         _ => panic!("Unknown time unit provided!"),
249 |     };
250 |     converted_time as f64
251 | }
252 | 
253 | fn build_label(date_time: &str, tag: &str) -> String {
254 |     match tag {
255 |         "" => date_time.to_string(),
256 |         _ => String::new() + date_time + " (" + tag + ")",
257 |     }
258 | }
259 | 
260 | #[cfg(test)]
261 | mod tests {
262 |     use super::*;
263 |     #[test]
264 |     fn test_build_label_function() {
265 |         assert_eq!(build_label("Test", "123"), "Test (123)");
266 |         assert_eq!(build_label("Test", ""), "Test");
267 |     }
268 | }
269 | 
--------------------------------------------------------------------------------
/src/repocheck.rs:
--------------------------------------------------------------------------------
1 | use crate::exec::*;
2 | use crate::find::*;
3 | use crate::logger::*;
4 | use crate::parse::*;
5 | 
6 | use colored::*;
7 | use execute::{shell, Execute};
8 | use git2::{Commit, Error, Oid, Repository};
9 | use serde::{Deserialize, Serialize};
10 | use std::fmt::Write;
11 | use std::fs::{self, File};
12 | use std::io::BufReader;
13 | use std::path::{Path, PathBuf};
14 | 
15 | #[derive(Serialize, Deserialize, Debug)]
16 | pub struct RepocheckSettings {
17 |     version: u32,
18 |     repo_path: PathBuf,
19 |     branch_name: String,
20 |     from_commit: String,
21 |     to_commit: String,
22 |     build_commands: String,
23 |     benchmark_regex: String,
24 |     pub no_clean: Option<bool>, // special flag to e.g. continue from previous run
--------------------------------------------------------------------------------
/src/repocheck.rs:
--------------------------------------------------------------------------------
1 | use crate::exec::*;
2 | use crate::find::*;
3 | use crate::logger::*;
4 | use crate::parse::*;
5 | 
6 | use colored::*;
7 | use execute::{shell, Execute};
8 | use git2::{Commit, Error, Oid, Repository};
9 | use serde::{Deserialize, Serialize};
10 | use std::fmt::Write;
11 | use std::fs::{self, File};
12 | use std::io::BufReader;
13 | use std::path::{Path, PathBuf};
14 | 
15 | #[derive(Serialize, Deserialize, Debug)]
16 | pub struct RepocheckSettings {
17 |     version: u32,
18 |     repo_path: PathBuf,
19 |     branch_name: String,
20 |     from_commit: String,
21 |     to_commit: String,
22 |     build_commands: String,
23 |     benchmark_regex: String,
24 |     pub no_clean: Option<bool>, // special flag to e.g. continue from previous run
25 | }
26 | 
27 | pub fn parse<P: AsRef<Path>>(yaml_path: P) -> RepocheckSettings {
28 |     match File::open(yaml_path) {
29 |         Ok(f) => match serde_yaml::from_reader(BufReader::new(f)) {
30 |             Ok(yaml_val) => yaml_val,
31 |             Err(e) => error_and_exit("repocheck yaml has invalid format", &e),
32 |         },
33 |         Err(e) => {
34 |             error_and_exit("Could not open repocheck yaml", &e);
35 |         }
36 |     }
37 | }
38 | 
39 | pub fn collect_repocheck_results(settings: &RepocheckSettings) -> Vec {
40 |     let full_repo_path = match std::fs::canonicalize(settings.repo_path.as_path()) {
41 |         Ok(path) => path,
42 |         Err(e) => {
43 |             error_and_exit("Invalid path to repository", &e);
44 |         }
45 |     };
46 | 
47 |     let export_dir = export_dir(&full_repo_path, &settings.branch_name);
48 | 
49 |     let mut collected_benchmark_results: Vec = vec![];
50 |     if export_dir.is_dir() {
51 |         println!("Files to parse:");
52 |         for entry in fs::read_dir(export_dir).unwrap() {
53 |             let repocheck_file_path = entry.unwrap().path();
54 |             println!("{}", &repocheck_file_path.to_string_lossy());
55 |             let single_file_results = json_from_file(repocheck_file_path.as_path());
56 |             let mut json: Vec = serde_json::from_value(single_file_results)
57 |                 .expect("Could not deserialize JsonValue from single benchmark file!");
58 |             collected_benchmark_results.append(&mut json);
59 |         }
60 |     }
61 |     collected_benchmark_results
62 | }
63 | 
64 | pub fn run(settings: &RepocheckSettings) {
65 |     let full_repo_path = match std::fs::canonicalize(settings.repo_path.as_path()) {
66 |         Ok(path) => path,
67 |         Err(e) => {
68 |             error_and_exit("Invalid path to repository", &e);
69 |         }
70 |     };
71 | 
72 |     let repo = match Repository::open(full_repo_path.as_path()) {
73 |         Ok(repo) => repo,
74 |         Err(e) => {
75 |             error_and_exit("Could not open repository", &e);
76 |         }
77 |     };
78 | 
79 |     if repo.state() != git2::RepositoryState::Clean {
80 |         let e = Error::from_str("RepositoryState != Clean");
81 |         error_and_exit("Clean up repository state first!", &e);
82 |     }
83 | 
84 |     println!(
85 |         "Checking out branch '{}' in repository '{}'...",
86 |         settings.branch_name,
87 |         full_repo_path.to_string_lossy()
88 |     );
89 |     if let Err(e) = checkout_branch(&repo, &settings.branch_name) {
90 |         error_and_exit("Could not checkout specified branch", &e);
91 |     }
92 |     println!("{}\n", "Successful!".green());
93 | 
94 |     // "continue run" or "clean run"
95 |     match &settings.no_clean {
96 |         Some(true) => {
97 |             println!("Will not delete previous results!");
98 |         }
99 |         Some(false) | None => {
100 |             let export_dir_path = export_dir(&full_repo_path, &settings.branch_name);
101 |             if export_dir_path.is_dir() {
102 |                 println!(
103 |                     "Deleting previous results in directory {}...",
104 |                     export_dir_path.as_path().to_string_lossy()
105 |                 );
106 |                 fs::remove_dir_all(export_dir_path).expect("Could not delete directory!");
107 |             }
108 |         }
109 |     }
110 | 
111 |     println!("Walking specified commit range...");
112 |     if let Err(e) = walk_commits(&repo, &full_repo_path, &settings) {
113 |         error_and_exit("Could not walk through specified commit range", &e);
114 |     }
115 |     println!("{}\n", "Successful!".green());
116 | }
117 | 
118 | fn walk_commits(
119 |     repo: &Repository,
120 |     full_repo_path: &Path,
121 |     settings: &RepocheckSettings,
122 | ) -> Result<(), Error> {
123 |     let from_commit_oid = Oid::from_str(settings.from_commit.as_str())?;
124 |     let to_commit_oid = Oid::from_str(settings.to_commit.as_str())?;
125 |     let from_commit_parent = repo.find_commit(from_commit_oid)?.parent(0)?;
126 | 
127 |     let mut revwalk = repo.revwalk()?;
128 |     revwalk.push(to_commit_oid)?;
129 |     revwalk.hide(from_commit_parent.id())?;
130 |     revwalk.simplify_first_parent()?;
131 |     revwalk.set_sorting(git2::Sort::REVERSE | git2::Sort::TIME)?;
132 | 
133 |     for rev in revwalk {
134 |         let commit = repo.find_commit(rev?)?;
135 | 
136 |         // create filename to save results
137 |         let commit_id_str = id_to_str(commit.id().as_bytes());
138 |         let export_file_name = "commit_".to_string() + commit_id_str.as_str() + ".json";
139 |         let export_dir = export_dir(full_repo_path, &settings.branch_name);
140 |         let mut export_file_path = export_dir;
141 |         export_file_path.push(Path::new(&export_file_name));
142 | 
143 |         if let Some(true) = settings.no_clean.as_ref() {
144 |             if export_file_path.exists() {
145 |                 println!(
146 |                     "Results for commit {} already present. Continue...",
147 |                     commit_id_str
148 |                 );
149 |                 continue;
150 |             }
151 |         }
152 | 
153 |         checkout_commit(repo, &commit)?;
154 | 
155 |         println!("Building for commit {}...", &commit.id());
156 | 
157 |         let repo_workdir = repo.workdir().unwrap();
158 |         println!(
159 |             "Using working directory {}...",
160 |             repo_workdir.to_string_lossy()
161 |         );
162 | 
163 |         for cmd in settings.build_commands.lines() {
164 |             println!("Executing cmd: {}", cmd.blue());
165 |             let mut build_cmd = shell(cmd);
166 |             let output = match build_cmd.current_dir(repo_workdir).execute_output() {
167 |                 Ok(res) => res,
168 |                 Err(e) => error_and_exit("Command execution error", &e),
169 |             };
170 |             let output_str = String::from_utf8(output.stdout).unwrap();
171 |             print!("{}\n", output_str.as_str());
172 |         }
173 | 
174 |         println!("{}\n", "Successful!".green());
175 | 
176 |         let benchmark_paths = find_executables(repo_workdir, &settings.benchmark_regex);
177 |         let mut results = execute_benchmarks(benchmark_paths);
178 | 
179 |         append_commit_id(&mut results, &commit_id_str);
180 | 
181 |         // create parent dir
182 |         let export_parent_dir = export_file_path.parent().unwrap();
183 |         std::fs::create_dir_all(export_parent_dir).unwrap();
184 | 
185 |         export_results_to_file(&results, export_file_path.as_path());
186 |     }
187 | 
188 |     Ok(())
189 | }
190 | 
191 | fn checkout_branch(repo: &Repository, branch_name: &str) -> Result<(), Error> {
192 |     let obj = repo.revparse_single(&("refs/heads/".to_owned() + branch_name))?;
193 |     repo.checkout_tree(&obj, None)?;
194 |     repo.set_head(&("refs/heads/".to_owned() + branch_name))?;
195 |     Ok(())
196 | }
197 | 
198 | fn checkout_commit(repo: &Repository, commit: &Commit) -> Result<(), Error> {
199 |     repo.checkout_tree(commit.as_object(), None)?;
200 |     repo.set_head_detached(commit.id())?;
201 |     Ok(())
202 | }
203 | 
204 | fn export_dir(repo_path: &Path, branch_name: &str) -> PathBuf {
205 |     let mut export_file_path = preferences::prefs_base_dir().unwrap();
206 |     export_file_path.push(Path::new("beastrepocheck"));
207 |     export_file_path.push(repo_path.components().last().unwrap());
208 |     export_file_path.push(Path::new(branch_name));
209 |     export_file_path
210 | }
211 | 
212 | fn append_commit_id(results: &mut Vec, commit_id_str: &str) {
213 |     for result in results {
214 |         result.commit = Some(commit_id_str.to_string());
215 |     }
216 | }
217 | 
218 | fn id_to_str(oid: &[u8]) -> String {
219 |     let mut oid_str = String::new();
220 |     for &byte in &oid[0..4] {
221 |         write!(&mut oid_str, "{:02x}", byte).expect("Could not write commit ID byte!");
222 |     }
223 |     oid_str
224 | }
225 | 
226 | #[cfg(test)]
227 | mod tests {
228 |     use super::*;
229 |     #[test]
230 |     fn test_parse() {
231 |         let test_yaml_path =
232 |             Path::new(env!("CARGO_MANIFEST_DIR")).join("doc/example_repocheck_settings.yaml");
233 |         let parsed_settings = parse(test_yaml_path);
234 |         assert_eq!(parsed_settings.version, 1);
235 |         assert_eq!(parsed_settings.repo_path.as_path(), Path::new("."));
236 |         assert_eq!(parsed_settings.branch_name, "master");
237 |     }
238 | }
239 | 
--------------------------------------------------------------------------------
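Closing note (not part of the repository sources): a minimal, self-contained sketch of how a settings file with the field names declared in RepocheckSettings deserializes via serde_yaml. The struct is re-declared locally just for the sketch, and the commit hashes, build commands and benchmark regex below are illustrative placeholders, not the contents of doc/example_repocheck_settings.yaml; serde (with derive) and serde_yaml are assumed as dependencies.

use serde::Deserialize;
use std::path::PathBuf;

// Local stand-in mirroring the field names of RepocheckSettings above.
#[derive(Deserialize, Debug)]
struct SketchSettings {
    version: u32,
    repo_path: PathBuf,
    branch_name: String,
    from_commit: String,
    to_commit: String,
    build_commands: String,
    benchmark_regex: String,
    no_clean: Option<bool>,
}

fn main() {
    // Placeholder values only; build_commands is a multi-line block scalar,
    // one shell command per line, matching the settings.build_commands.lines() loop.
    let yaml = r##"
version: 1
repo_path: "."
branch_name: "master"
from_commit: "0000000000000000000000000000000000000000"
to_commit: "1111111111111111111111111111111111111111"
build_commands: |
  cmake -B build .
  cmake --build build
benchmark_regex: ".*benchmark.*"
no_clean: false
"##;

    let settings: SketchSettings = serde_yaml::from_str(yaml).expect("invalid settings yaml");
    assert_eq!(settings.version, 1);
    assert_eq!(settings.branch_name, "master");
    println!("{:?}", settings);
}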