├── .gitignore ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── img ├── availability_run.png ├── chaos.png └── chaotic_run.png └── src ├── cargo.rs ├── dependencies.rs ├── diff.rs ├── env.rs ├── error.rs ├── features.rs ├── lib.rs ├── macros.rs ├── manifest.rs ├── message.rs ├── normalize.rs ├── path.rs ├── run.rs ├── rustflags.rs └── term.rs /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "kaos" 3 | version = "0.1.1-alpha.2" 4 | authors = ["Mahmut Bulut "] 5 | description = "Chaotic testing harness" 6 | repository = "https://github.com/vertexclique/kaos" 7 | documentation = "https://docs.rs/kaos" 8 | readme = "README.md" 9 | license = "Apache-2.0/MIT" 10 | edition = "2018" 11 | exclude = [ 12 | ".github/*", 13 | "examples/*", 14 | "graphstore/*", 15 | "tests/*", 16 | "img/*", 17 | "ci/*", 18 | "benches/*", 19 | "doc/*", 20 | "*.png", 21 | "*.dot", 22 | "*.yml", 23 | "*.toml", 24 | "*.md" 25 | ] 26 | 27 | 28 | [features] 29 | diff = ["dissimilar"] 30 | 31 | [dependencies] 32 | dissimilar = { version = "1.0", optional = true } 33 | glob = "0.3" 34 | lazy_static = "1.3" 35 | serde = { version = "1.0.103", features = ["derive"] } 36 | serde_json = "1.0" 37 | termcolor = "1.0.4" 38 | toml = "0.5.2" 39 | fail = { version = "0.3", features = ["failpoints"] } 40 | humantime = "2.0.0" 41 | proptest = "0.9.5" 42 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |

4 |
5 | 6 | Chaotic Testing Harness 7 | 8 |
9 | 10 | [![Build Status](https://github.com/vertexclique/kaos/workflows/CI/badge.svg)](https://github.com/vertexclique/kaos/actions) 11 | [![Latest Version](https://img.shields.io/crates/v/kaos.svg)](https://crates.io/crates/kaos) 12 | [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/kaos/) 13 |
14 | 15 | **Kaos** is a chaotic testing harness to test your services against random failures. It allows you to add points to your code that crash sporadically, and the harness asserts the availability and fault tolerance of your services by seeking the minimum time between failures, fail points, and randomized runs. 16 | 17 | Kaos is the equivalent of Chaos Monkey for the Rust ecosystem, but it is smarter: it finds the closest MTBF based on previous runs. This is a dependable-systems practice. For more information please visit [Chaos engineering](https://en.wikipedia.org/wiki/Chaos_engineering). 18 | 19 |
20 |
21 | 22 | Get started! 23 | 24 |
25 | -------------------------------------------------------------------------------- /img/availability_run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vertexclique/kaos/9876f6c890339741cc5be4b7cb9df72baa5a6d79/img/availability_run.png -------------------------------------------------------------------------------- /img/chaos.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vertexclique/kaos/9876f6c890339741cc5be4b7cb9df72baa5a6d79/img/chaos.png -------------------------------------------------------------------------------- /img/chaotic_run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vertexclique/kaos/9876f6c890339741cc5be4b7cb9df72baa5a6d79/img/chaotic_run.png -------------------------------------------------------------------------------- /src/cargo.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | use std::path::PathBuf; 3 | use std::process::{Command, Output, Stdio}; 4 | 5 | use crate::error::{Error, Result}; 6 | use crate::manifest::Name; 7 | use crate::run::Project; 8 | use crate::rustflags; 9 | 10 | #[derive(Deserialize)] 11 | pub struct Metadata { 12 | pub target_directory: PathBuf, 13 | pub workspace_root: PathBuf, 14 | } 15 | 16 | fn raw_cargo() -> Command { 17 | Command::new(option_env!("CARGO").unwrap_or("cargo")) 18 | } 19 | 20 | fn cargo(project: &Project) -> Command { 21 | let mut cmd = raw_cargo(); 22 | cmd.current_dir(&project.dir); 23 | cmd.env( 24 | "CARGO_TARGET_DIR", 25 | path!(project.target_dir / "tests" / "target"), 26 | ); 27 | cmd.arg("--offline"); 28 | rustflags::set_env(&mut cmd); 29 | cmd 30 | } 31 | 32 | pub fn build_dependencies(project: &Project) -> Result<()> { 33 | let status = cargo(project) 34 | .arg(if project.has_run_at_least { "build" } else 
{ "check" }) 35 | .arg("--bin") 36 | .arg(&project.name) 37 | .status() 38 | .map_err(Error::Cargo)?; 39 | 40 | if status.success() { 41 | Ok(()) 42 | } else { 43 | Err(Error::CargoFail) 44 | } 45 | } 46 | 47 | pub fn build_test(project: &Project, name: &Name) -> Result { 48 | let _ = cargo(project) 49 | .arg("clean") 50 | .arg("--package") 51 | .arg(&project.name) 52 | .arg("--color=never") 53 | .stdout(Stdio::null()) 54 | .stderr(Stdio::null()) 55 | .status(); 56 | 57 | cargo(project) 58 | .arg(if project.has_run_at_least { "build" } else { "check" }) 59 | .arg("--bin") 60 | .arg(name) 61 | .args(features(project)) 62 | .arg("--quiet") 63 | .arg("--color=never") 64 | .output() 65 | .map_err(Error::Cargo) 66 | } 67 | 68 | pub fn run_test(project: &Project, name: &Name) -> Result { 69 | cargo(project) 70 | .arg("run") 71 | .arg("--bin") 72 | .arg(name) 73 | .args(features(project)) 74 | .arg("--quiet") 75 | .arg("--color=never") 76 | .output() 77 | .map_err(Error::Cargo) 78 | } 79 | 80 | pub fn metadata() -> Result { 81 | let output = raw_cargo() 82 | .arg("metadata") 83 | .arg("--format-version=1") 84 | .output() 85 | .map_err(Error::Cargo)?; 86 | 87 | serde_json::from_slice(&output.stdout).map_err(Error::Metadata) 88 | } 89 | 90 | fn features(project: &Project) -> Vec { 91 | match &project.features { 92 | Some(features) => vec![ 93 | "--no-default-features".to_owned(), 94 | "--features".to_owned(), 95 | features.join(","), 96 | ], 97 | None => vec![], 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/dependencies.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Error; 2 | use crate::manifest::Edition; 3 | use serde::de::value::MapAccessDeserializer; 4 | use serde::de::{self, Visitor}; 5 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 6 | use std::collections::BTreeMap as Map; 7 | use std::fmt; 8 | use std::fs; 9 | use std::path::Path; 
10 | use std::path::PathBuf; 11 | use toml::Value; 12 | 13 | pub fn get_manifest(manifest_dir: &Path) -> Manifest { 14 | try_get_manifest(manifest_dir).unwrap_or_default() 15 | } 16 | 17 | fn try_get_manifest(manifest_dir: &Path) -> Result { 18 | let cargo_toml_path = manifest_dir.join("Cargo.toml"); 19 | let manifest_str = fs::read_to_string(cargo_toml_path)?; 20 | let mut manifest: Manifest = toml::from_str(&manifest_str)?; 21 | 22 | fix_dependencies(&mut manifest.dependencies, manifest_dir); 23 | fix_dependencies(&mut manifest.dev_dependencies, manifest_dir); 24 | 25 | Ok(manifest) 26 | } 27 | 28 | pub fn get_workspace_manifest(manifest_dir: &Path) -> WorkspaceManifest { 29 | try_get_workspace_manifest(manifest_dir).unwrap_or_default() 30 | } 31 | 32 | pub fn try_get_workspace_manifest(manifest_dir: &Path) -> Result { 33 | let cargo_toml_path = manifest_dir.join("Cargo.toml"); 34 | let manifest_str = fs::read_to_string(cargo_toml_path)?; 35 | let mut manifest: WorkspaceManifest = toml::from_str(&manifest_str)?; 36 | 37 | fix_patches(&mut manifest.patch, manifest_dir); 38 | fix_replacements(&mut manifest.replace, manifest_dir); 39 | 40 | Ok(manifest) 41 | } 42 | 43 | fn fix_dependencies(dependencies: &mut Map, dir: &Path) { 44 | // TODO: Don't remove the dependency it is not a dev dependency. 
45 | // dependencies.remove("kaos"); 46 | for dep in dependencies.values_mut() { 47 | dep.path = dep.path.as_ref().map(|path| dir.join(path)); 48 | } 49 | } 50 | 51 | fn fix_patches(patches: &mut Map, dir: &Path) { 52 | for registry in patches.values_mut() { 53 | registry.crates.remove("kaos"); 54 | for patch in registry.crates.values_mut() { 55 | patch.path = patch.path.as_ref().map(|path| dir.join(path)); 56 | } 57 | } 58 | } 59 | 60 | fn fix_replacements(replacements: &mut Map, dir: &Path) { 61 | replacements.remove("kaos"); 62 | for replacement in replacements.values_mut() { 63 | replacement.path = replacement.path.as_ref().map(|path| dir.join(path)); 64 | } 65 | } 66 | 67 | #[derive(Deserialize, Default, Debug)] 68 | pub struct WorkspaceManifest { 69 | #[serde(default)] 70 | pub patch: Map, 71 | #[serde(default)] 72 | pub replace: Map, 73 | } 74 | 75 | #[derive(Deserialize, Default, Debug)] 76 | pub struct Manifest { 77 | #[serde(default)] 78 | pub package: Package, 79 | #[serde(default)] 80 | pub features: Map>, 81 | #[serde(default)] 82 | pub dependencies: Map, 83 | #[serde(default, alias = "dev-dependencies")] 84 | pub dev_dependencies: Map, 85 | } 86 | 87 | #[derive(Deserialize, Default, Debug)] 88 | pub struct Package { 89 | #[serde(default)] 90 | pub edition: Edition, 91 | } 92 | 93 | #[derive(Serialize, Deserialize, Clone, Debug)] 94 | #[serde(remote = "Self")] 95 | pub struct Dependency { 96 | #[serde(skip_serializing_if = "Option::is_none")] 97 | pub version: Option, 98 | #[serde(skip_serializing_if = "Option::is_none")] 99 | pub path: Option, 100 | #[serde( 101 | rename = "default-features", 102 | default = "get_true", 103 | skip_serializing_if = "is_true" 104 | )] 105 | pub default_features: bool, 106 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 107 | pub features: Vec, 108 | #[serde(flatten)] 109 | pub rest: Map, 110 | } 111 | 112 | #[derive(Serialize, Deserialize, Clone, Debug)] 113 | #[serde(transparent)] 114 | pub struct 
RegistryPatch { 115 | crates: Map, 116 | } 117 | 118 | #[derive(Serialize, Deserialize, Clone, Debug)] 119 | pub struct Patch { 120 | #[serde(skip_serializing_if = "Option::is_none")] 121 | pub path: Option, 122 | #[serde(skip_serializing_if = "Option::is_none")] 123 | pub git: Option, 124 | #[serde(skip_serializing_if = "Option::is_none")] 125 | pub branch: Option, 126 | } 127 | 128 | fn get_true() -> bool { 129 | true 130 | } 131 | 132 | fn is_true(boolean: &bool) -> bool { 133 | *boolean 134 | } 135 | 136 | impl Serialize for Dependency { 137 | fn serialize(&self, serializer: S) -> Result 138 | where 139 | S: Serializer, 140 | { 141 | Dependency::serialize(self, serializer) 142 | } 143 | } 144 | 145 | impl<'de> Deserialize<'de> for Dependency { 146 | fn deserialize(deserializer: D) -> Result 147 | where 148 | D: Deserializer<'de>, 149 | { 150 | struct DependencyVisitor; 151 | 152 | impl<'de> Visitor<'de> for DependencyVisitor { 153 | type Value = Dependency; 154 | 155 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 156 | formatter.write_str( 157 | "a version string like \"0.9.8\" or a \ 158 | dependency like { version = \"0.9.8\" }", 159 | ) 160 | } 161 | 162 | fn visit_str(self, s: &str) -> Result 163 | where 164 | E: de::Error, 165 | { 166 | Ok(Dependency { 167 | version: Some(s.to_owned()), 168 | path: None, 169 | default_features: true, 170 | features: Vec::new(), 171 | rest: Map::new(), 172 | }) 173 | } 174 | 175 | fn visit_map(self, map: M) -> Result 176 | where 177 | M: de::MapAccess<'de>, 178 | { 179 | Dependency::deserialize(MapAccessDeserializer::new(map)) 180 | } 181 | } 182 | 183 | deserializer.deserialize_any(DependencyVisitor) 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/diff.rs: -------------------------------------------------------------------------------- 1 | pub use self::r#impl::Diff; 2 | 3 | pub enum Render<'a> { 4 | Common(&'a str), 5 | Unique(&'a str), 6 
| } 7 | 8 | #[cfg(all(feature = "diff", not(windows)))] 9 | mod r#impl { 10 | use super::Render; 11 | use dissimilar::Chunk; 12 | use std::cmp; 13 | use std::panic; 14 | 15 | pub struct Diff<'a> { 16 | expected: &'a str, 17 | actual: &'a str, 18 | diff: Vec>, 19 | } 20 | 21 | impl<'a> Diff<'a> { 22 | pub fn compute(expected: &'a str, actual: &'a str) -> Option { 23 | if expected.len() + actual.len() > 2048 { 24 | // We don't yet trust the dissimilar crate to work well on large 25 | // inputs. 26 | return None; 27 | } 28 | 29 | // Nor on non-ascii inputs. 30 | let diff = panic::catch_unwind(|| dissimilar::diff(expected, actual)).ok()?; 31 | 32 | let mut common_len = 0; 33 | for chunk in &diff { 34 | if let Chunk::Equal(common) = chunk { 35 | common_len += common.len(); 36 | } 37 | } 38 | 39 | let bigger_len = cmp::max(expected.len(), actual.len()); 40 | let worth_printing = 5 * common_len >= 4 * bigger_len; 41 | if !worth_printing { 42 | return None; 43 | } 44 | 45 | Some(Diff { 46 | expected, 47 | actual, 48 | diff, 49 | }) 50 | } 51 | 52 | pub fn iter<'i>(&'i self, input: &str) -> impl Iterator> + 'i { 53 | let expected = input == self.expected; 54 | let actual = input == self.actual; 55 | self.diff.iter().filter_map(move |chunk| match chunk { 56 | Chunk::Equal(common) => Some(Render::Common(common)), 57 | Chunk::Delete(unique) if expected => Some(Render::Unique(unique)), 58 | Chunk::Insert(unique) if actual => Some(Render::Unique(unique)), 59 | _ => None, 60 | }) 61 | } 62 | } 63 | } 64 | 65 | #[cfg(any(not(feature = "diff"), windows))] 66 | mod r#impl { 67 | use super::Render; 68 | 69 | pub enum Diff {} 70 | 71 | impl Diff { 72 | pub fn compute(_expected: &str, _actual: &str) -> Option { 73 | None 74 | } 75 | 76 | pub fn iter(&self, _input: &str) -> Box> { 77 | let _ = Render::Common; 78 | let _ = Render::Unique; 79 | match *self {} 80 | } 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/env.rs: 
-------------------------------------------------------------------------------- 1 | use crate::error::{Error, Result}; 2 | use std::env; 3 | 4 | #[derive(PartialEq, Debug)] 5 | pub enum Update { 6 | Wip, 7 | Overwrite, 8 | } 9 | 10 | impl Default for Update { 11 | fn default() -> Self { 12 | Update::Wip 13 | } 14 | } 15 | 16 | impl Update { 17 | pub fn env() -> Result { 18 | let var = match env::var_os("KAOS") { 19 | Some(var) => var, 20 | None => return Ok(Update::default()), 21 | }; 22 | 23 | match var.as_os_str().to_str() { 24 | Some("wip") => Ok(Update::Wip), 25 | Some("overwrite") => Ok(Update::Overwrite), 26 | _ => Err(Error::UpdateVar(var)), 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use glob::{GlobError, PatternError}; 2 | use std::env; 3 | use std::ffi::OsString; 4 | use std::fmt::{self, Display}; 5 | use std::io; 6 | use std::path::PathBuf; 7 | 8 | #[derive(Debug)] 9 | pub enum Error { 10 | Cargo(io::Error), 11 | CargoFail, 12 | Glob(GlobError), 13 | Io(io::Error), 14 | Metadata(serde_json::Error), 15 | Mismatch, 16 | Open(PathBuf, io::Error), 17 | Pattern(PatternError), 18 | PkgName(env::VarError), 19 | ProjectDir, 20 | ReadStderr(io::Error), 21 | RunFailed, 22 | ChaosTestFailed(String), 23 | ShouldNotHaveCompiled, 24 | TomlDe(toml::de::Error), 25 | TomlSer(toml::ser::Error), 26 | UpdateVar(OsString), 27 | WriteStderr(io::Error), 28 | Shrink(proptest::test_runner::TestError) 29 | } 30 | 31 | pub type Result = std::result::Result; 32 | 33 | impl Display for Error { 34 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 35 | use self::Error::*; 36 | 37 | match self { 38 | Cargo(e) => write!(f, "failed to execute cargo: {}", e), 39 | CargoFail => write!(f, "cargo reported an error"), 40 | Glob(e) => write!(f, "{}", e), 41 | Io(e) => write!(f, "{}", e), 42 | Metadata(e) => write!(f, "failed 
to read cargo metadata: {}", e), 43 | Mismatch => write!(f, "compiler error does not match expected error"), 44 | Open(path, e) => write!(f, "{}: {}", path.display(), e), 45 | Pattern(e) => write!(f, "{}", e), 46 | PkgName(e) => write!(f, "failed to detect CARGO_PKG_NAME: {}", e), 47 | ProjectDir => write!(f, "failed to determine name of project dir"), 48 | ReadStderr(e) => write!(f, "failed to read stderr file: {}", e), 49 | RunFailed => write!(f, "execution of the test case was unsuccessful"), 50 | ChaosTestFailed(e) => write!(f, "chaos test failed: {}", e), 51 | ShouldNotHaveCompiled => { 52 | write!(f, "expected test case to fail to compile, but it succeeded") 53 | } 54 | TomlDe(e) => write!(f, "{}", e), 55 | TomlSer(e) => write!(f, "{}", e), 56 | UpdateVar(var) => write!( 57 | f, 58 | "unrecognized value of KAOS: {:?}", 59 | var.to_string_lossy(), 60 | ), 61 | WriteStderr(e) => write!(f, "failed to write stderr file: {}", e), 62 | Shrink(e) => write!(f, "test failed with a randomization: {}", e), 63 | } 64 | } 65 | } 66 | 67 | impl Error { 68 | pub fn already_printed(&self) -> bool { 69 | use self::Error::*; 70 | 71 | match self { 72 | CargoFail | Mismatch | RunFailed | ShouldNotHaveCompiled => true, 73 | _ => false, 74 | } 75 | } 76 | } 77 | 78 | impl From for Error { 79 | fn from(err: GlobError) -> Self { 80 | Error::Glob(err) 81 | } 82 | } 83 | 84 | impl From for Error { 85 | fn from(err: PatternError) -> Self { 86 | Error::Pattern(err) 87 | } 88 | } 89 | 90 | impl From for Error { 91 | fn from(err: io::Error) -> Self { 92 | Error::Io(err) 93 | } 94 | } 95 | 96 | impl From for Error { 97 | fn from(err: toml::de::Error) -> Self { 98 | Error::TomlDe(err) 99 | } 100 | } 101 | 102 | impl From for Error { 103 | fn from(err: toml::ser::Error) -> Self { 104 | Error::TomlSer(err) 105 | } 106 | } 107 | 108 | 109 | impl From> for Error { 110 | fn from(err: proptest::test_runner::TestError) -> Self { 111 | Error::Shrink(err) 112 | } 113 | } 114 | 
-------------------------------------------------------------------------------- /src/features.rs: -------------------------------------------------------------------------------- 1 | use serde::de::DeserializeOwned; 2 | use serde::{de, Deserialize, Deserializer}; 3 | use std::env; 4 | use std::error::Error; 5 | use std::ffi::OsStr; 6 | use std::fs; 7 | use std::path::PathBuf; 8 | 9 | pub fn find() -> Option> { 10 | try_find().ok() 11 | } 12 | 13 | struct Ignored; 14 | 15 | impl From for Ignored { 16 | fn from(_error: E) -> Self { 17 | Ignored 18 | } 19 | } 20 | 21 | #[derive(Deserialize)] 22 | struct Build { 23 | #[serde(deserialize_with = "from_json")] 24 | features: Vec, 25 | } 26 | 27 | fn try_find() -> Result, Ignored> { 28 | // This will look something like: 29 | // /path/to/crate_name/target/debug/deps/test_name-HASH 30 | let test_binary = env::args_os().next().ok_or(Ignored)?; 31 | 32 | // The hash at the end is ascii so not lossy, rest of conversion doesn't 33 | // matter. 34 | let test_binary_lossy = test_binary.to_string_lossy(); 35 | let hash_range = if cfg!(windows) { 36 | // Trim ".exe" from the binary name for windows. 37 | test_binary_lossy.len() - 21..test_binary_lossy.len() - 4 38 | } else { 39 | test_binary_lossy.len() - 17..test_binary_lossy.len() 40 | }; 41 | let hash = test_binary_lossy.get(hash_range).ok_or(Ignored)?; 42 | if !hash.starts_with('-') || !hash[1..].bytes().all(is_lower_hex_digit) { 43 | return Err(Ignored); 44 | } 45 | 46 | let binary_path = PathBuf::from(&test_binary); 47 | 48 | // Feature selection is saved in: 49 | // /path/to/crate_name/target/debug/.fingerprint/*-HASH/*-HASH.json 50 | let up = binary_path 51 | .parent() 52 | .ok_or(Ignored)? 53 | .parent() 54 | .ok_or(Ignored)?; 55 | let fingerprint_dir = up.join(".fingerprint"); 56 | if !fingerprint_dir.is_dir() { 57 | return Err(Ignored); 58 | } 59 | 60 | let mut hash_matches = Vec::new(); 61 | for entry in fingerprint_dir.read_dir()? 
{ 62 | let entry = entry?; 63 | let is_dir = entry.file_type()?.is_dir(); 64 | let matching_hash = entry.file_name().to_string_lossy().ends_with(hash); 65 | if is_dir && matching_hash { 66 | hash_matches.push(entry.path()); 67 | } 68 | } 69 | 70 | if hash_matches.len() != 1 { 71 | return Err(Ignored); 72 | } 73 | 74 | let mut json_matches = Vec::new(); 75 | for entry in hash_matches[0].read_dir()? { 76 | let entry = entry?; 77 | let is_file = entry.file_type()?.is_file(); 78 | let is_json = entry.path().extension() == Some(OsStr::new("json")); 79 | if is_file && is_json { 80 | json_matches.push(entry.path()); 81 | } 82 | } 83 | 84 | if json_matches.len() != 1 { 85 | return Err(Ignored); 86 | } 87 | 88 | let build_json = fs::read_to_string(&json_matches[0])?; 89 | let build: Build = serde_json::from_str(&build_json)?; 90 | Ok(build.features) 91 | } 92 | 93 | fn is_lower_hex_digit(byte: u8) -> bool { 94 | byte >= b'0' && byte <= b'9' || byte >= b'a' && byte <= b'f' 95 | } 96 | 97 | fn from_json<'de, T, D>(deserializer: D) -> Result 98 | where 99 | T: DeserializeOwned, 100 | D: Deserializer<'de>, 101 | { 102 | let json = String::deserialize(deserializer)?; 103 | serde_json::from_str(&json).map_err(de::Error::custom) 104 | } 105 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | 2 | //! ####  Chaotic testing harness 3 | //! 4 | //! **Kaos** is a chaotic testing harness to test your services against random failures. 5 | //! It allows you to add points to your code to crash sporadically and 6 | //! harness asserts availability and fault tolerance of your services by seeking 7 | //! minimum time between failures, fail points, and randomized runs. 8 | //! 9 | //! Kaos is equivalent of Chaos Monkey for the Rust ecosystem. But it is more smart to find the closest MTBF based on previous runs. 10 | //! This is dependable system practice. 
For more information please visit [Chaos engineering](https://en.wikipedia.org/wiki/Chaos_engineering). 11 | //! 12 | //! # Test Setup 13 | //! 14 | //! It is better to separate resilience tests. 15 | //! Create a directory that will hold all chaos tests. In our example it will be `kaos-tests`. 16 | //! 17 | //! A minimal launcher for kaos setup looks like this: 18 | //! 19 | //! ``` 20 | //! #[test] 21 | //! fn chaos_tests() { 22 | //! let k = kaos::Runs::new(); 23 | //! 24 | //! for entry in fs::read_dir("kaos-tests").unwrap() { 25 | //! let entry = entry.unwrap(); 26 | //! let path = entry.path(); 27 | //! 28 | //! // Every service run should be available at least 2 seconds 29 | //! k.available(path, Duration::from_secs(2)); 30 | //! } 31 | //! } 32 | //! ``` 33 | //! 34 | //! and in your Cargo.toml 35 | //! 36 | //! ```toml 37 | //! [[test]] 38 | //! name = "chaos_tests" 39 | //! path = "kaos-tests/launcher.rs" 40 | //! ``` 41 | //! 42 | //! Mind that there two types of tests, first one is: availability test, the latter one is chaotic test which seeks the minimum timing, failure, MTBF combination. 43 | //! The setup shows availability tests as an example. When availability tests run you will see: 44 | //! 45 | //!
//! <img src="https://raw.githubusercontent.com/vertexclique/kaos/master/img/availability_run.png"/>
46 | //! 47 | //!

48 | //! 49 | //! 50 | //! ## Definining flunks 51 | //! In kaos there is a concept of [flunk]. Every flunk is a point of failure with panic. This can be redefinable. 52 | //! After adding kaos as dependency you can add flunk points to define fallible operations or crucial points that system should continue its operation. 53 | //! 54 | //! Basic flunk is like: 55 | //! ```rust 56 | //! use kaos::flunk; 57 | //! fn vec_check(v: &Vec) { 58 | //! if v.len() == 3 { 59 | //! flunk!("fail-when-three-elems"); 60 | //! } 61 | //! } 62 | //! ``` 63 | //! This flunk point will be used later by kaos. 64 | //! 65 | //! ## Writing tests 66 | //! Test harness will execute tests marked by a launcher. An example test for the flunk mentioned above is like this: 67 | //! ``` 68 | //! # use std::panic; 69 | //! # use kaos::flunk; 70 | //! # fn vec_check(v: &Vec) { 71 | //! # if v.len() == 3 { 72 | //! # flunk!("fail-when-three-elems"); 73 | //! # } 74 | //! # } 75 | //! use kaos::kaostest; 76 | //! 77 | //! kaostest!("fail-when-three-elems", 78 | //! { 79 | //! panic::catch_unwind(|| { 80 | //! let mut v = &mut vec![]; 81 | //! loop { 82 | //! v.push(1); 83 | //! vec_check(v); 84 | //! } 85 | //! }); 86 | //! } 87 | //! ); 88 | //! ``` 89 | //! # Chaos Tests 90 | //! 91 | //! In addition to availability tests mentioned above we can test the software with chaos tests too. 92 | //! For using chaotic measures and finding bare minimum failure, timing and MTBF combination 93 | //! you can configure chaos tests in your launcher: 94 | //! 95 | //! ``` 96 | //! #[test] 97 | //! fn chaos_tests() { 98 | //! let k = kaos::Runs::new(); 99 | //! 100 | //! for entry in fs::read_dir("kaos-tests").unwrap() { 101 | //! let entry = entry.unwrap(); 102 | //! let path = entry.path(); 103 | //! 104 | //! // Let's have 10 varying runs. 105 | //! let run_count = 10; 106 | //! 107 | //! // Minimum availability to expect as milliseconds for the runs. 108 | //! 
// Which corresponds as maximum surge between service runs. 109 | //! // Let's have it 10 seconds. 110 | //! let max_surge = 10 * 1000; 111 | //! 112 | //! // Run chaotic test. 113 | //! k.chaotic(path, run_count, max_surge); 114 | //! } 115 | //! } 116 | //! ``` 117 | //! This launcher produce multiple results like: 118 | //! 119 | //!
//! <img src="https://raw.githubusercontent.com/vertexclique/kaos/master/img/chaotic_run.png"/>
120 | //! 121 | //!

122 | //! 123 | //! Now you know all the basics, what you have to do is *unleash some chaos* with `cargo test`. 124 | //! 125 | //! Kaos is using the same approach that [trybuild](https://docs.rs/trybuild) has. 126 | //! Instead of being compiler-like test harness, it has diverged to be chaos engineering 127 | //! oriented harness. 128 | 129 | #![doc( 130 | html_logo_url = "https://raw.githubusercontent.com/vertexclique/kaos/master/img/chaos.png" 131 | )] 132 | 133 | extern crate humantime; 134 | 135 | #[macro_use] 136 | mod term; 137 | 138 | #[macro_use] 139 | mod path; 140 | 141 | mod cargo; 142 | mod dependencies; 143 | mod diff; 144 | mod env; 145 | mod error; 146 | mod features; 147 | mod manifest; 148 | mod message; 149 | mod normalize; 150 | mod run; 151 | mod rustflags; 152 | mod macros; 153 | 154 | use std::cell::RefCell; 155 | use std::path::{Path, PathBuf}; 156 | use std::{time::Duration, thread}; 157 | 158 | #[doc(hidden)] 159 | pub use fail::eval as flunker; 160 | #[doc(hidden)] 161 | pub use fail::cfg as flunker_cfg; 162 | #[doc(hidden)] 163 | pub use fail::FailScenario as KaosFailScenario; 164 | 165 | 166 | pub use macros::*; 167 | 168 | /// 169 | /// Chaotic runs test setup 170 | #[derive(Debug)] 171 | pub struct Runs { 172 | runner: RefCell, 173 | } 174 | 175 | #[derive(Debug)] 176 | struct Runner { 177 | tests: Vec, 178 | } 179 | 180 | #[derive(Clone, Debug)] 181 | struct Test { 182 | path: PathBuf, 183 | duration: Option, 184 | max_surge: isize, 185 | expected: Expected, 186 | } 187 | 188 | #[derive(Copy, Clone, Debug)] 189 | enum Expected { 190 | Available, 191 | Chaotic 192 | } 193 | 194 | impl Runs { 195 | #[allow(clippy::new_without_default)] 196 | pub fn new() -> Self { 197 | Runs { 198 | runner: RefCell::new(Runner { tests: Vec::new() }), 199 | } 200 | } 201 | 202 | pub fn available>(&self, path: P, duration: Duration) { 203 | self.runner.borrow_mut().tests.push(Test { 204 | path: path.as_ref().to_owned(), 205 | duration: Some(duration), 
206 | max_surge: !0, 207 | expected: Expected::Available, 208 | }); 209 | } 210 | 211 | pub fn chaotic>(&self, path: P, run_count: usize, max_surge: usize) { 212 | (0..run_count).into_iter().for_each(|_| { 213 | self.runner.borrow_mut().tests.push(Test { 214 | path: path.as_ref().to_owned(), 215 | duration: None, 216 | max_surge: max_surge as isize, 217 | expected: Expected::Chaotic, 218 | }); 219 | }); 220 | } 221 | } 222 | 223 | #[doc(hidden)] 224 | impl Drop for Runs { 225 | fn drop(&mut self) { 226 | if !thread::panicking() { 227 | self.runner.borrow_mut().run(); 228 | } 229 | } 230 | } 231 | -------------------------------------------------------------------------------- /src/macros.rs: -------------------------------------------------------------------------------- 1 | /// 2 | /// Macro to define a point to flunk 3 | #[macro_export] 4 | macro_rules! flunk { 5 | ($name:expr) => {{ 6 | $crate::flunker($name, |_| { 7 | panic!("KAOS: Flunking at \"{}\"", $name); 8 | }); 9 | }}; 10 | } 11 | 12 | /// 13 | /// Define kaos tests 14 | #[macro_export] 15 | macro_rules! 
kaostest {
    // Sets up a fail-point scenario, arms the named flunk point to panic,
    // runs the caller's body, then tears the scenario down.
    ($name:expr, $body:block) => {{
        let scenario = $crate::KaosFailScenario::setup();
        $crate::flunker_cfg($name, "panic").unwrap();

        $body

        scenario.teardown();
    }};
}


#[cfg(test)]
mod macro_tests {
    #[test]
    fn kaostest() {
        kaostest!("potato", {
            println!("potato");
        });
    }
}
-------------------------------------------------------------------------------- /src/manifest.rs: --------------------------------------------------------------------------------
use crate::dependencies::{Dependency, Patch, RegistryPatch};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap as Map;
use std::ffi::OsStr;
use std::path::PathBuf;

/// The Cargo.toml generated for the sandboxed test project under
/// `target/tests/<crate>`.
// NOTE(review): all map/vec generic arguments below were stripped by an HTML
// pass; they are reconstructed from how `run.rs` populates this manifest
// (feature name -> enable list, crate name -> Dependency, etc.).
#[derive(Serialize, Debug)]
pub struct Manifest {
    pub package: Package,
    #[serde(skip_serializing_if = "Map::is_empty")]
    pub features: Map<String, Vec<String>>,
    pub dependencies: Map<String, Dependency>,
    #[serde(rename = "bin")]
    pub bins: Vec<Bin>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub workspace: Option<Workspace>,
    #[serde(skip_serializing_if = "Map::is_empty")]
    pub patch: Map<String, RegistryPatch>,
    #[serde(skip_serializing_if = "Map::is_empty")]
    pub replace: Map<String, Patch>,
}

#[derive(Serialize, Debug)]
pub struct Package {
    pub name: String,
    pub version: String,
    pub edition: Edition,
    pub publish: bool,
}

/// Rust edition written into the generated manifest; serialized as the year
/// string cargo expects.
#[derive(Serialize, Deserialize, Debug)]
pub enum Edition {
    #[serde(rename = "2015")]
    E2015,
    #[serde(rename = "2018")]
    E2018,
}

/// One `[[bin]]` entry of the generated project.
#[derive(Serialize, Debug)]
pub struct Bin {
    pub name: Name,
    pub path: PathBuf,
}

/// Newtype over a binary name, so it can serialize as a bare string and be
/// passed where an `OsStr` is needed.
#[derive(Serialize, Clone, Debug)]
pub struct Name(pub String);

/// The generated `.cargo/config`.
#[derive(Serialize, Debug)]
pub struct Config {
    pub build: Build,
}

#[derive(Serialize, Debug)]
pub struct Build {
    pub rustflags: Vec<&'static str>,
}

#[derive(Serialize, Debug)]
pub struct Workspace {}

impl Default for Edition {
    fn default() -> Self {
        Edition::E2018
    }
}

// NOTE(review): restored `AsRef<OsStr>` — the `<OsStr>` argument was stripped
// by an HTML pass; the body coerces `&String` into `&OsStr`.
impl AsRef<OsStr> for Name {
    fn as_ref(&self) -> &OsStr {
        self.0.as_ref()
    }
}
-------------------------------------------------------------------------------- /src/message.rs: --------------------------------------------------------------------------------
use termcolor::Color::{self, *};

use super::{Expected, Test};
use crate::diff::{Diff, Render};
use crate::error::Error;
use crate::normalize;
use crate::term;

use std::env;
use std::path::Path;
use std::process::Output;
use humantime::format_duration;

/// Severity used when dumping captured output of a failed step.
pub(crate) enum Level {
    Fail,
    Warn,
}

pub(crate) use self::Level::*;

/// Report an error that happened while preparing the scratch project.
/// Errors that already printed themselves at the failure site are skipped.
pub(crate) fn prepare_fail(err: Error) {
    if err.already_printed() {
        return;
    }

    term::bold_color(Red);
    print!("ERROR");
    term::reset();
    println!(": {}", err);
    println!();
}

/// Report a per-test failure (again skipping already-printed errors).
pub(crate) fn test_fail(err: Error) {
    if err.already_printed() {
        return;
    }

    term::bold_color(Red);
    println!("error");
    term::color(Red);
    println!("{}", err);
    term::reset();
    println!();
}

pub(crate) fn no_tests_enabled() {
    term::color(Yellow);
    println!("There are no kaos tests enabled yet.");
    term::reset();
}

pub(crate) fn ok() {
    term::color(Green);
    println!("ok");
    term::reset();
}

/// Print the `test <name>` banner. With `show_expected` the short file name
/// is used and the expectation (`Available`/`Chaotic`) is appended.
pub(crate) fn begin_test(test: &Test, show_expected: bool) {
    let display_name = if show_expected {
        test.path
            .file_name()
            .unwrap_or_else(|| test.path.as_os_str())
            .to_string_lossy()
    } else {
        test.path.as_os_str().to_string_lossy()
    };

    print!("test ");
    term::bold();
    print!("{}", display_name);
    term::reset();

    if show_expected {
74 | match test.expected { 75 | Expected::Available => print!(" [should survive at least {}]", format_duration(test.duration.unwrap()).to_string()), 76 | Expected::Chaotic => print!(" [should survive from chaos]"), 77 | } 78 | } 79 | 80 | print!(" ... "); 81 | } 82 | 83 | pub(crate) fn failed_to_build(stderr: &str) { 84 | term::bold_color(Red); 85 | println!("error"); 86 | snippet(Red, stderr); 87 | println!(); 88 | } 89 | 90 | pub(crate) fn should_not_have_compiled() { 91 | term::bold_color(Red); 92 | println!("error"); 93 | term::color(Red); 94 | println!("Expected test case to fail to compile, but it succeeded."); 95 | term::reset(); 96 | println!(); 97 | } 98 | 99 | pub(crate) fn write_stderr_wip(wip_path: &Path, stderr_path: &Path, stderr: &str) { 100 | let wip_path = wip_path.to_string_lossy(); 101 | let stderr_path = stderr_path.to_string_lossy(); 102 | 103 | term::bold_color(Yellow); 104 | println!("wip"); 105 | println!(); 106 | print!("NOTE"); 107 | term::reset(); 108 | println!(": writing the following output to `{}`.", wip_path); 109 | println!( 110 | "Move this file to `{}` to accept it as correct.", 111 | stderr_path, 112 | ); 113 | snippet(Yellow, stderr); 114 | println!(); 115 | } 116 | 117 | pub(crate) fn overwrite_stderr(stderr_path: &Path, stderr: &str) { 118 | let stderr_path = stderr_path.to_string_lossy(); 119 | 120 | term::bold_color(Yellow); 121 | println!("wip"); 122 | println!(); 123 | print!("NOTE"); 124 | term::reset(); 125 | println!(": writing the following output to `{}`.", stderr_path); 126 | snippet(Yellow, stderr); 127 | println!(); 128 | } 129 | 130 | pub(crate) fn mismatch(expected: &str, actual: &str) { 131 | term::bold_color(Red); 132 | println!("mismatch"); 133 | term::reset(); 134 | println!(); 135 | let diff = if env::var_os("TERM").map_or(true, |term| term == "dumb") { 136 | // No diff in dumb terminal or when TERM is unset. 
137 | None 138 | } else { 139 | Diff::compute(expected, actual) 140 | }; 141 | term::bold_color(Blue); 142 | println!("EXPECTED:"); 143 | snippet_diff(Blue, expected, diff.as_ref()); 144 | println!(); 145 | term::bold_color(Red); 146 | println!("ACTUAL OUTPUT:"); 147 | snippet_diff(Red, actual, diff.as_ref()); 148 | print!("note: If the "); 149 | term::color(Red); 150 | print!("actual output"); 151 | term::reset(); 152 | println!(" is the correct output you can bless it by rerunning"); 153 | println!(" your test with the environment variable KAOS=overwrite"); 154 | println!(); 155 | } 156 | 157 | pub(crate) fn output(warnings: &str, output: &Output) { 158 | let success = output.status.success(); 159 | let stdout = normalize::trim(&output.stdout); 160 | let stderr = normalize::trim(&output.stderr); 161 | let has_output = !stdout.is_empty() || !stderr.is_empty(); 162 | 163 | if success { 164 | ok(); 165 | if has_output || !warnings.is_empty() { 166 | println!(); 167 | } 168 | } else { 169 | term::bold_color(Red); 170 | println!("error"); 171 | term::color(Red); 172 | if has_output { 173 | println!("Test case failed at runtime."); 174 | } else { 175 | println!("Execution of the test case was unsuccessful but there was no output."); 176 | } 177 | term::reset(); 178 | println!(); 179 | } 180 | 181 | self::warnings(warnings); 182 | 183 | let color = if success { Yellow } else { Red }; 184 | 185 | for (name, content) in &[("STDOUT", stdout), ("STDERR", stderr)] { 186 | if !content.is_empty() { 187 | term::bold_color(color); 188 | println!("{}:", name); 189 | snippet(color, &normalize::trim(content)); 190 | println!(); 191 | } 192 | } 193 | } 194 | 195 | pub(crate) fn fail_output(level: Level, stdout: &[u8]) { 196 | let color = match level { 197 | Fail => Red, 198 | Warn => Yellow, 199 | }; 200 | 201 | if !stdout.is_empty() { 202 | term::bold_color(color); 203 | println!("STDOUT:"); 204 | snippet(color, &normalize::trim(stdout)); 205 | println!(); 206 | } 207 | } 208 | 209 
| pub(crate) fn warnings(warnings: &str) { 210 | if warnings.is_empty() { 211 | return; 212 | } 213 | 214 | term::bold_color(Yellow); 215 | println!("WARNINGS:"); 216 | snippet(Yellow, warnings); 217 | println!(); 218 | } 219 | 220 | fn snippet(color: Color, content: &str) { 221 | snippet_diff(color, content, None); 222 | } 223 | 224 | fn snippet_diff(color: Color, content: &str, diff: Option<&Diff>) { 225 | fn dotted_line() { 226 | println!("{}", "┈".repeat(60)); 227 | } 228 | 229 | term::color(color); 230 | dotted_line(); 231 | 232 | match diff { 233 | Some(diff) => { 234 | for chunk in diff.iter(content) { 235 | match chunk { 236 | Render::Common(s) => { 237 | term::color(color); 238 | print!("{}", s); 239 | } 240 | Render::Unique(s) => { 241 | term::bold_color(color); 242 | print!("\x1B[7m{}", s); 243 | } 244 | } 245 | } 246 | } 247 | None => print!("{}", content), 248 | } 249 | 250 | term::color(color); 251 | dotted_line(); 252 | term::reset(); 253 | } 254 | -------------------------------------------------------------------------------- /src/normalize.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | #[derive(Copy, Clone)] 4 | pub struct Context<'a> { 5 | pub krate: &'a str, 6 | pub source_dir: &'a Path, 7 | pub workspace: &'a Path, 8 | } 9 | 10 | pub fn trim>(output: S) -> String { 11 | let bytes = output.as_ref(); 12 | let mut normalized = String::from_utf8_lossy(bytes).to_string(); 13 | 14 | let len = normalized.trim_end().len(); 15 | normalized.truncate(len); 16 | 17 | if !normalized.is_empty() { 18 | normalized.push('\n'); 19 | } 20 | 21 | normalized 22 | } 23 | 24 | /// For a given compiler output, produces the set of saved outputs against which 25 | /// the compiler's output would be considered correct. If the test's saved 26 | /// stderr file is identical to any one of these variations, the test will pass. 
27 | /// 28 | /// This is a set rather than just one normalized output in order to avoid 29 | /// breaking existing tests when introducing new normalization steps. Someone 30 | /// may have saved stderr snapshots with an older version of kaos, and those 31 | /// tests need to continue to pass with newer versions of kaos. 32 | /// 33 | /// There is one "preferred" variation which is what we print when the stderr 34 | /// file is absent or not a match. 35 | pub fn diagnostics(output: Vec, context: Context) -> Variations { 36 | let mut from_bytes = String::from_utf8_lossy(&output).to_string(); 37 | from_bytes = from_bytes.replace("\r\n", "\n"); 38 | 39 | let variations = [ 40 | Basic, 41 | StripCouldNotCompile, 42 | StripCouldNotCompile2, 43 | StripForMoreInformation, 44 | StripForMoreInformation2, 45 | ] 46 | .iter() 47 | .map(|normalization| apply(&from_bytes, *normalization, context)) 48 | .collect(); 49 | 50 | Variations { variations } 51 | } 52 | 53 | pub struct Variations { 54 | variations: Vec, 55 | } 56 | 57 | impl Variations { 58 | pub fn preferred(&self) -> &str { 59 | self.variations.last().unwrap() 60 | } 61 | 62 | pub fn any bool>(&self, mut f: F) -> bool { 63 | self.variations.iter().any(|stderr| f(stderr)) 64 | } 65 | } 66 | 67 | #[derive(PartialOrd, PartialEq, Copy, Clone)] 68 | enum Normalization { 69 | Basic, 70 | StripCouldNotCompile, 71 | StripCouldNotCompile2, 72 | StripForMoreInformation, 73 | StripForMoreInformation2, 74 | } 75 | 76 | use self::Normalization::*; 77 | 78 | fn apply(original: &str, normalization: Normalization, context: Context) -> String { 79 | let mut normalized = String::new(); 80 | 81 | for line in original.lines() { 82 | if let Some(line) = filter(line, normalization, context) { 83 | normalized += &line; 84 | if !normalized.ends_with("\n\n") { 85 | normalized.push('\n'); 86 | } 87 | } 88 | } 89 | 90 | trim(normalized) 91 | } 92 | 93 | fn filter(line: &str, normalization: Normalization, context: Context) -> Option { 94 | if 
line.trim_start().starts_with("--> ") { 95 | if let Some(cut_end) = line.rfind(&['/', '\\'][..]) { 96 | let cut_start = line.find('>').unwrap() + 2; 97 | return Some(line[..cut_start].to_owned() + "$DIR/" + &line[cut_end + 1..]); 98 | } 99 | } 100 | 101 | if line.trim_start().starts_with("::: ") { 102 | let line = line.replace(context.workspace.to_string_lossy().as_ref(), "$WORKSPACE"); 103 | return Some(line.replace('\\', "/")); 104 | } 105 | 106 | if line.starts_with("error: aborting due to ") { 107 | return None; 108 | } 109 | 110 | if line == "To learn more, run the command again with --verbose." { 111 | return None; 112 | } 113 | 114 | if normalization >= StripCouldNotCompile { 115 | if line.starts_with("error: Could not compile `") { 116 | return None; 117 | } 118 | } 119 | 120 | if normalization >= StripCouldNotCompile2 { 121 | if line.starts_with("error: could not compile `") { 122 | return None; 123 | } 124 | } 125 | 126 | if normalization >= StripForMoreInformation { 127 | if line.starts_with("For more information about this error, try `rustc --explain") { 128 | return None; 129 | } 130 | } 131 | 132 | if normalization >= StripForMoreInformation2 { 133 | if line.starts_with("Some errors have detailed explanations:") { 134 | return None; 135 | } 136 | if line.starts_with("For more information about an error, try `rustc --explain") { 137 | return None; 138 | } 139 | } 140 | 141 | let line = line 142 | .replace(context.krate, "$CRATE") 143 | .replace(context.source_dir.to_string_lossy().as_ref(), "$DIR") 144 | .replace(context.workspace.to_string_lossy().as_ref(), "$WORKSPACE"); 145 | 146 | Some(line) 147 | } 148 | -------------------------------------------------------------------------------- /src/path.rs: -------------------------------------------------------------------------------- 1 | macro_rules! path { 2 | ($($tt:tt)+) => { 3 | tokenize_path!([] [] $($tt)+) 4 | }; 5 | } 6 | 7 | // Private implementation detail. 8 | macro_rules! 
tokenize_path { 9 | ([$(($($component:tt)+))*] [$($cur:tt)+] / $($rest:tt)+) => { 10 | tokenize_path!([$(($($component)+))* ($($cur)+)] [] $($rest)+) 11 | }; 12 | 13 | ([$(($($component:tt)+))*] [$($cur:tt)*] $first:tt $($rest:tt)*) => { 14 | tokenize_path!([$(($($component)+))*] [$($cur)* $first] $($rest)*) 15 | }; 16 | 17 | ([$(($($component:tt)+))*] [$($cur:tt)+]) => { 18 | tokenize_path!([$(($($component)+))* ($($cur)+)]) 19 | }; 20 | 21 | ([$(($($component:tt)+))*]) => {{ 22 | let mut path = std::path::PathBuf::new(); 23 | $( 24 | path.push(&($($component)+)); 25 | )* 26 | path 27 | }}; 28 | } 29 | 30 | #[test] 31 | fn test_path_macro() { 32 | use std::path::{Path, PathBuf}; 33 | 34 | struct Project { 35 | dir: PathBuf, 36 | } 37 | 38 | let project = Project { 39 | dir: PathBuf::from("../target/tests"), 40 | }; 41 | 42 | let cargo_dir = path!(project.dir / ".cargo" / "config"); 43 | assert_eq!(cargo_dir, Path::new("../target/tests/.cargo/config")); 44 | } 45 | -------------------------------------------------------------------------------- /src/run.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap as Map; 2 | use std::env; 3 | use std::ffi::{OsStr, OsString}; 4 | use std::fs::{self, File}; 5 | use std::{time::{Instant, Duration}, path::{Path, PathBuf}}; 6 | 7 | use super::{Expected, Runner, Test}; 8 | use crate::cargo; 9 | use crate::dependencies::{self, Dependency}; 10 | use crate::env::Update; 11 | use crate::error::{Error, Result}; 12 | use crate::features; 13 | use crate::manifest::{Bin, Build, Config, Manifest, Name, Package, Workspace}; 14 | use crate::message::{self, Fail, Warn}; 15 | use crate::normalize::{self, Context, Variations}; 16 | use crate::rustflags; 17 | use std::convert::TryInto; 18 | use proptest::test_runner::{TestRunner, TestCaseError}; 19 | use humantime::format_duration; 20 | 21 | #[derive(Debug)] 22 | pub struct Project { 23 | pub dir: PathBuf, 24 | source_dir: 
PathBuf,
    pub target_dir: PathBuf,
    pub name: String,
    update: Update,
    pub has_run_at_least: bool,
    // NOTE(review): the element types below were stripped by an HTML pass and
    // are reconstructed from `prepare`: `surges` collects `Test::max_surge`
    // (isize), `durations` is seeded with `Option<Duration>`, and `features`
    // comes from `features::find() -> Option<Vec<String>>`.
    pub surges: Vec<isize>,
    test_idx: usize,
    pub durations: Vec<Option<Duration>>,
    has_compile_fail: bool,
    pub features: Option<Vec<String>>,
    workspace: PathBuf,
}

impl Runner {
    /// Expand globs, build the scratch cargo project once, run every
    /// registered test against it, and panic at the end if any failed.
    pub fn run(&mut self) {
        let mut tests = expand_globs(&self.tests);
        filter(&mut tests);

        let mut project = self.prepare(&tests).unwrap_or_else(|err| {
            message::prepare_fail(err);
            panic!("tests failed");
        });

        print!("\n\n");

        let len = tests.len();
        let mut failures = 0;

        if tests.is_empty() {
            message::no_tests_enabled();
        } else {
            for test in tests {
                if let Err(err) = test.run(&mut project) {
                    failures += 1;
                    message::test_fail(err);
                }
            }
        }

        print!("\n\n");

        // kaos's own self-test project is exempt from the final panic.
        if failures > 0 && project.name != "kaos-tests" {
            panic!("{} of {} tests failed", failures, len);
        }
    }

    /// Create the sandbox project under `target/tests/<crate>` that the test
    /// binaries are compiled into.
    // NOTE(review): return type `Result<Project>` restored (stripped by the
    // HTML pass; `Result` here is the crate alias from `error.rs`).
    fn prepare(&self, tests: &[ExpandedTest]) -> Result<Project> {
        let metadata = cargo::metadata()?;
        let target_dir = metadata.target_directory;
        let workspace = metadata.workspace_root;

        let crate_name = env::var("CARGO_PKG_NAME").map_err(Error::PkgName)?;

        let mut has_run_at_least = false;
        let mut has_compile_fail = false;
        for e in tests {
            match e.test.expected {
                Expected::Available => has_run_at_least = true,
                Expected::Chaotic => has_compile_fail = true,
            }
        }

        let surges: Vec<isize> = tests.iter().map(|t| t.test.max_surge).collect();

        let mut static_durations: Vec<Option<Duration>> = vec![None; surges.len()];

        // `!0` marks availability tests; copy their fixed duration across.
        // (Was a side-effecting `Option::map`; `if let` says the same thing.)
        // NOTE(review): only the FIRST `!0` entry receives its duration — if
        // several availability tests are registered the rest stay `None`.
        // Preserved as-is to avoid a behavior change; worth confirming
        // upstream.
        if let Some(i) = surges.iter().position(|&e| e == !0) {
            static_durations[i] = tests[i].test.duration;
        }

        let source_dir = env::var_os("CARGO_MANIFEST_DIR")
            .map(PathBuf::from)
            .ok_or(Error::ProjectDir)?;

        let features = features::find();

        let mut
project = Project { 104 | dir: path!(target_dir / "tests" / crate_name), 105 | source_dir, 106 | target_dir, 107 | name: format!("{}-tests", crate_name), 108 | update: Update::env()?, 109 | surges, 110 | test_idx: 0, 111 | durations: static_durations, 112 | has_run_at_least, 113 | has_compile_fail, 114 | features, 115 | workspace, 116 | }; 117 | 118 | let manifest = self.make_manifest(crate_name, &project, tests)?; 119 | let manifest_toml = toml::to_string(&manifest)?; 120 | 121 | let config = self.make_config(); 122 | let config_toml = toml::to_string(&config)?; 123 | 124 | match &mut project.features { 125 | Some(enabled_features) => { 126 | enabled_features.retain(|feature| manifest.features.contains_key(feature)); 127 | // enabled_features.push("fail/failpoints".into()); 128 | } 129 | _ => { 130 | // project.features = Some(vec!["fail/failpoints".into()]); 131 | } 132 | } 133 | 134 | fs::create_dir_all(path!(project.dir / ".cargo"))?; 135 | fs::write(path!(project.dir / ".cargo" / "config"), config_toml)?; 136 | fs::write(path!(project.dir / "Cargo.toml"), manifest_toml)?; 137 | fs::write(path!(project.dir / "main.rs"), b"fn main() {}\n")?; 138 | 139 | cargo::build_dependencies(&project)?; 140 | 141 | Ok(project) 142 | } 143 | 144 | fn make_manifest( 145 | &self, 146 | crate_name: String, 147 | project: &Project, 148 | tests: &[ExpandedTest], 149 | ) -> Result { 150 | let source_manifest = dependencies::get_manifest(&project.source_dir); 151 | let workspace_manifest = dependencies::get_workspace_manifest(&project.workspace); 152 | 153 | let features = source_manifest 154 | .features 155 | .keys() 156 | .map(|feature| { 157 | let enable = format!("{}/{}", crate_name, feature); 158 | (feature.clone(), vec![enable]) 159 | }) 160 | .collect(); 161 | 162 | let mut manifest = Manifest { 163 | package: Package { 164 | name: project.name.clone(), 165 | version: "0.0.0".to_owned(), 166 | edition: source_manifest.package.edition, 167 | publish: false, 168 | }, 169 | 
            features,
            dependencies: Map::new(),
            bins: Vec::new(),
            workspace: Some(Workspace {}),
            // Within a workspace, only the [patch] and [replace] sections in
            // the workspace root's Cargo.toml are applied by Cargo.
            patch: workspace_manifest.patch,
            replace: workspace_manifest.replace,
        };

        // The scratch project depends on everything the source crate depends
        // on (dev-dependencies included), plus the source crate itself by path.
        manifest.dependencies.extend(source_manifest.dependencies);
        manifest
            .dependencies
            .extend(source_manifest.dev_dependencies);
        manifest.dependencies.insert(
            crate_name,
            Dependency {
                version: None,
                path: Some(project.source_dir.clone()),
                default_features: false,
                features: Vec::new(),
                rest: Map::new(),
            },
        );

        // Placeholder main.rs bin, plus one bin per successfully expanded test.
        manifest.bins.push(Bin {
            name: Name(project.name.to_owned()),
            path: Path::new("main.rs").to_owned(),
        });

        for expanded in tests {
            if expanded.error.is_none() {
                manifest.bins.push(Bin {
                    name: expanded.name.clone(),
                    path: project.source_dir.join(&expanded.test.path),
                });
            }
        }

        Ok(manifest)
    }

    /// Build the `.cargo/config` for the scratch project, injecting the
    /// flags from `rustflags::make_vec()` (`--cfg kaos` plus allowed lints).
    fn make_config(&self) -> Config {
        Config {
            build: Build {
                rustflags: rustflags::make_vec(),
            },
        }
    }
}

impl Test {
    /// Run one chaos test, in one of two modes selected by the surge value
    /// recorded for this test in `project.surges`:
    ///
    /// * `max_surge != !0` — proptest mode: a `TestRunner` executes the test
    ///   once per generated value in `0..max_surge`; each value, taken as
    ///   milliseconds, is the minimum wall-clock time the test must survive.
    /// * `max_surge == !0` (sentinel) — single-run mode: the test runs once
    ///   and must last at least the duration from `project.durations`.
    fn run(&self, project: &mut Project, name: &Name) -> Result<()> {
        let show_expected = project.has_run_at_least && project.has_compile_fail;
        let mut runner = TestRunner::default();

        let max_surge = project.surges[project.test_idx];

        if max_surge != !0 {
            // NOTE(review): only this branch advances `test_idx`; the
            // single-run branch below reads `durations[test_idx]` without
            // incrementing — confirm that is intended and later tests do not
            // re-read the same slot.
            project.test_idx += 1;

            let res = runner.run(&(0..max_surge), |v| {
                // The generated value doubles as the availability window (ms).
                let duration = Duration::from_millis(v.try_into().unwrap());
                let now = Instant::now();

                message::begin_test(self, show_expected);
                check_exists(&self.path).unwrap();

                let output = cargo::build_test(project, name).unwrap();
                let success = output.status.success();
                let stdout = output.stdout;
                // Normalize compiler diagnostics so crate name / paths are
                // stable across machines.
                let stderr = normalize::diagnostics(
                    output.stderr,
                    Context {
                        krate: &name.0,
                        source_dir: &project.source_dir,
                        workspace: &project.workspace,
                    },
                );

                let check = match self.expected {
                    Expected::Available => Test::check_available,
                    // TODO: separate cases
                    Expected::Chaotic => Test::check_available,
                };

                let res = check(self, project, name, success, stdout, stderr);
                let elapsed = now.elapsed();
                if elapsed < duration {
                    // Completed, but died before the required window elapsed.
                    Err(TestCaseError::Fail(
                        format!(
                            "chaos test failed: availability is low. Expected at least: {}, Found: {}",
                            format_duration(duration).to_string(),
                            format_duration(elapsed).to_string()
                        ).into()
                    ))
                } else {
                    res.map_err(|e| TestCaseError::Fail(format!("{}", e).into()))
                }
            })?;

            Ok(res)
        } else {
            // Single-run mode: the required duration was configured per test.
            let duration = project.durations[project.test_idx].unwrap();
            let now = Instant::now();

            message::begin_test(self, show_expected);
            check_exists(&self.path).unwrap();

            let output = cargo::build_test(project, name).unwrap();
            let success = output.status.success();
            let stdout = output.stdout;
            let stderr = normalize::diagnostics(
                output.stderr,
                Context {
                    krate: &name.0,
                    source_dir: &project.source_dir,
                    workspace: &project.workspace,
                },
            );

            let check = match self.expected {
                Expected::Available => Test::check_available,
                // TODO: separate cases
                Expected::Chaotic => Test::check_available,
            };

            let res = check(self, project, name, success, stdout, stderr);
            let elapsed = now.elapsed();
            if elapsed < duration {
                Err(Error::ChaosTestFailed(
                    format!(
                        "availability is low. Expected at least: {}, Found: {}",
                        format_duration(duration).to_string(),
                        format_duration(elapsed).to_string()
                    )
                ))
            } else {
                res
            }
        }
    }

    /// Pass/fail check shared by `Available` and (until the TODO above is
    /// resolved) `Chaotic`: the test binary must build, then run to a
    /// successful exit status.
    fn check_available(
        &self,
        project: &Project,
        name: &Name,
        success: bool,
        // NOTE(review): generic parameter lost in extraction; `Vec<u8>`
        // assumed from `output.stdout` above — confirm against original.
        build_stdout: Vec<u8>,
        variations: Variations,
    ) -> Result<()> {
        let preferred = variations.preferred();
        if !success {
            message::failed_to_build(preferred);
            return Err(Error::CargoFail);
        }

        let mut output = cargo::run_test(project, name)?;
        // Prepend the build-phase stdout so the report shows both phases.
        output.stdout.splice(..0, build_stdout);
        message::output(preferred, &output);
        if output.status.success() {
            Ok(())
        } else {
            Err(Error::RunFailed)
        }
    }
}

/// Ensure the test source file exists. `File::open` is attempted even after
/// `exists()` returns false so the returned `Error::Open` carries the
/// underlying io::Error describing why.
fn check_exists(path: &Path) -> Result<()> {
    if path.exists() {
        return Ok(());
    }
    match File::open(path) {
        Ok(_) => Ok(()),
        Err(err) => Err(Error::Open(path.to_owned(), err)),
    }
}

/// One concrete test case produced by glob expansion of a `Test`.
#[derive(Debug)]
struct ExpandedTest {
    name: Name,           // generated bin name, e.g. "kaos000"
    test: Test,
    error: Option<Error>, // glob-expansion error, surfaced when the test runs
}

fn expand_globs(tests: &[Test]) -> Vec<ExpandedTest> {
    // Expand a glob pattern into a sorted list of matching paths.
    fn glob(pattern: &str) -> Result<Vec<PathBuf>> {
        let mut paths = glob::glob(pattern)?
            .map(|entry| entry.map_err(Error::from))
            .collect::<Result<Vec<PathBuf>>>()?;
        paths.sort();
        Ok(paths)
    }

    // Sequential bin names: kaos000, kaos001, ...
    fn bin_name(i: usize) -> Name {
        Name(format!("kaos{:03}", i))
    }

    let mut vec = Vec::new();

    for test in tests {
        let mut expanded = ExpandedTest {
            name: bin_name(vec.len()),
            test: test.clone(),
            error: None,
        };
        if let Some(utf8) = test.path.to_str() {
            if utf8.contains('*') {
                match glob(utf8) {
                    Ok(paths) => {
                        // One test case per matched path, each inheriting the
                        // original test's duration / surge / expectation.
                        for path in paths {
                            vec.push(ExpandedTest {
                                name: bin_name(vec.len()),
                                test: Test {
                                    path,
                                    duration: expanded.test.duration,
                                    max_surge: expanded.test.max_surge,
                                    expected: expanded.test.expected,
                                },
                                error: None,
                            });
                        }
                        continue;
                    }
                    // Keep the unexpanded test; the error is reported when it runs.
                    Err(error) => expanded.error = Some(error),
                }
            }
        }
        vec.push(expanded);
    }

    vec
}

impl ExpandedTest {
    /// Run the underlying test, or surface the glob-expansion error that was
    /// recorded by `expand_globs`.
    fn run(self, project: &mut Project) -> Result<()> {
        match self.error {
            None => self.test.run(project, &self.name),
            Some(error) => {
                let show_expected = false;
                message::begin_test(&self.test, show_expected);
                Err(error)
            }
        }
    }
}

// Filter which test cases are run by kaos.
//
//     $ cargo test -- ui kaos=tuple_structs.rs
//
// The first argument after `--` must be the kaos test name i.e. the name of
// the function that has the #[test] attribute and calls kaos. That's to get
// Cargo to run the test at all. The next argument starting with `kaos=`
// provides a filename filter. Only test cases whose filename contains the
// filter string will be run.
425 | fn filter(tests: &mut Vec) { 426 | let filters = env::args_os() 427 | .flat_map(OsString::into_string) 428 | .filter_map(|mut arg| { 429 | const PREFIX: &str = "kaos="; 430 | if arg.starts_with(PREFIX) && arg != PREFIX { 431 | Some(arg.split_off(PREFIX.len())) 432 | } else { 433 | None 434 | } 435 | }) 436 | .collect::>(); 437 | 438 | if filters.is_empty() { 439 | return; 440 | } 441 | 442 | tests.retain(|t| { 443 | filters 444 | .iter() 445 | .any(|f| t.test.path.to_string_lossy().contains(f)) 446 | }); 447 | } 448 | -------------------------------------------------------------------------------- /src/rustflags.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::process::Command; 3 | 4 | const RUSTFLAGS: &str = "RUSTFLAGS"; 5 | const IGNORED_LINTS: &[&str] = &["dead_code"]; 6 | 7 | pub fn make_vec() -> Vec<&'static str> { 8 | let mut rustflags = vec!["--cfg", "kaos"]; 9 | 10 | for &lint in IGNORED_LINTS { 11 | rustflags.push("-A"); 12 | rustflags.push(lint); 13 | } 14 | 15 | rustflags 16 | } 17 | 18 | pub fn set_env(cmd: &mut Command) { 19 | let mut rustflags = match env::var_os(RUSTFLAGS) { 20 | Some(rustflags) => rustflags, 21 | None => return, 22 | }; 23 | 24 | for flag in make_vec() { 25 | rustflags.push(" "); 26 | rustflags.push(flag); 27 | } 28 | 29 | cmd.env(RUSTFLAGS, rustflags); 30 | } 31 | -------------------------------------------------------------------------------- /src/term.rs: -------------------------------------------------------------------------------- 1 | use lazy_static::lazy_static; 2 | use std::io::{Result, Write}; 3 | use std::sync::{Mutex, MutexGuard, PoisonError}; 4 | use termcolor::{Color, ColorChoice, ColorSpec, StandardStream as Stream, WriteColor}; 5 | 6 | lazy_static! 
{
    // Global terminal handle; all of kaos's printing funnels through this so
    // color state stays consistent across threads.
    static ref TERM: Mutex<Term> = Mutex::new(Term::new());
}

/// Lock the global terminal. A poisoned lock is recovered rather than
/// propagated — printing should never panic the harness.
pub fn lock() -> MutexGuard<'static, Term> {
    TERM.lock().unwrap_or_else(PoisonError::into_inner)
}

pub fn bold() {
    lock().set_color(ColorSpec::new().set_bold(true));
}

pub fn color(color: Color) {
    lock().set_color(ColorSpec::new().set_fg(Some(color)));
}

pub fn bold_color(color: Color) {
    lock().set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)));
}

pub fn reset() {
    lock().reset();
}

// Shadow std's print!/println! so all crate output goes through the locked,
// color-aware Term; write errors are deliberately ignored (`let _ =`).
#[deny(unused_macros)]
macro_rules! print {
    ($($args:tt)*) => {{
        use std::io::Write;
        let _ = std::write!($crate::term::lock(), $($args)*);
    }};
}

#[deny(unused_macros)]
macro_rules! println {
    ($($args:tt)*) => {{
        use std::io::Write;
        let _ = std::writeln!($crate::term::lock(), $($args)*);
    }};
}

pub struct Term {
    spec: ColorSpec,     // color to apply at the start of each line
    stream: Stream,      // stderr, colored when the terminal supports it
    start_of_line: bool, // next write must (re)apply `spec`
}

impl Term {
    fn new() -> Self {
        Term {
            spec: ColorSpec::new(),
            stream: Stream::stderr(ColorChoice::Auto),
            start_of_line: true,
        }
    }

    /// Record the desired color. It is not written to the stream here;
    /// `Write::write` applies it lazily at the start of the next line.
    fn set_color(&mut self, spec: &ColorSpec) {
        if self.spec != *spec {
            self.spec = spec.clone();
            self.start_of_line = true;
        }
    }

    /// Clear the pending spec and reset the underlying stream (best-effort).
    fn reset(&mut self) {
        self.spec = ColorSpec::new();
        let _ = self.stream.reset();
    }
}

impl Write for Term {
    // Color one line at a time because Travis does not preserve color setting
    // across output lines.
77 | fn write(&mut self, mut buf: &[u8]) -> Result { 78 | if self.spec.is_none() { 79 | return self.stream.write(buf); 80 | } 81 | 82 | let len = buf.len(); 83 | while !buf.is_empty() { 84 | if self.start_of_line { 85 | let _ = self.stream.set_color(&self.spec); 86 | } 87 | match buf.iter().position(|byte| *byte == b'\n') { 88 | Some(line_len) => { 89 | self.stream.write(&buf[..line_len + 1])?; 90 | self.start_of_line = true; 91 | buf = &buf[line_len + 1..]; 92 | } 93 | None => { 94 | self.stream.write(buf)?; 95 | self.start_of_line = false; 96 | break; 97 | } 98 | } 99 | } 100 | Ok(len) 101 | } 102 | 103 | fn flush(&mut self) -> Result<()> { 104 | self.stream.flush() 105 | } 106 | } 107 | --------------------------------------------------------------------------------