├── rust-toolchain.toml ├── demo.gif ├── .gitignore ├── oranda.json ├── dist-workspace.toml ├── Cargo.toml ├── LICENSE-MIT ├── src ├── tee_helper.rs ├── main.rs └── lib.rs ├── README.md ├── .github └── workflows │ ├── web.yml │ └── release.yml ├── LICENSE-APACHE └── Cargo.lock /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.85.0" 3 | -------------------------------------------------------------------------------- /demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mainmatter/rust-workshop-runner/HEAD/demo.gif -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # These are backup files generated by rustfmt 7 | **/*.rs.bk 8 | 9 | # MSVC Windows builds of rustc generate these, which store debugging information 10 | *.pdb 11 | .cargo 12 | vendor 13 | 14 | # Generated by `oranda generate ci` 15 | public/ -------------------------------------------------------------------------------- /oranda.json: -------------------------------------------------------------------------------- 1 | { 2 | "build": { 3 | "path_prefix": "rust-workshop-runner" 4 | }, 5 | "marketing": { 6 | "analytics": { 7 | "plausible": { 8 | "domain": "mainmatter.github.io" 9 | } 10 | } 11 | }, 12 | "components": { 13 | "artifacts": { 14 | "package_managers": { 15 | "preferred": { 16 | "cargo": "cargo install --locked workshop-runner" 17 | } 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /dist-workspace.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cargo:."] 3 | 4 | # Config for 'dist' 5 
| [dist] 6 | # The preferred dist version to use in CI (Cargo.toml SemVer syntax) 7 | cargo-dist-version = "0.30.0" 8 | # CI backends to support 9 | ci = "github" 10 | # The installers to generate for each app 11 | installers = ["shell", "powershell"] 12 | # Target platforms to build apps for (Rust target-triple syntax) 13 | targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] 14 | # Path that installers should place binaries in 15 | install-path = "CARGO_HOME" 16 | # Whether to install an updater program 17 | install-updater = false 18 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "workshop-runner" 3 | version = "0.2.5" 4 | edition = "2024" 5 | rust-version = "1.85" 6 | authors = ["Luca Palmieri "] 7 | description = "A CLI to run test-driven Rust workshops" 8 | keywords = ["workshop", "education", "learning"] 9 | categories = ["command-line-utilities"] 10 | repository = "https://github.com/mainmatter/rust-workshop-runner" 11 | license = "Apache-2.0 OR MIT" 12 | 13 | [[bin]] 14 | name = "wr" 15 | path = "src/main.rs" 16 | 17 | [lib] 18 | name = "wr" 19 | path = "src/lib.rs" 20 | 21 | [dependencies] 22 | anyhow = "1.0.72" 23 | clap = { version = "4.3.21", features = ["derive"] } 24 | fs-err = "2.9.0" 25 | indexmap = "2.0.0" 26 | read_input = "0.8.6" 27 | regex = "1.9.3" 28 | rusqlite = { version = "0.29.0", features = ["bundled"] } 29 | serde = { version = "1.0.183", features = ["derive"] } 30 | toml = "0.7.6" 31 | yansi = "0.5.1" 32 | textwrap = "0.16.1" 33 | 34 | # The profile that 'dist' will build with 35 | [profile.dist] 36 | inherits = "release" 37 | lto = "thin" 38 | -------------------------------------------------------------------------------- /LICENSE-MIT: 
-------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/tee_helper.rs: -------------------------------------------------------------------------------- 1 | use std::io::{self, Read, Write}; 2 | use std::process::{Command, Stdio}; 3 | use std::thread; 4 | 5 | pub struct Captured { 6 | pub stdout: Vec<u8>, 7 | pub stderr: Vec<u8>, 8 | pub status: std::process::ExitStatus, 9 | } 10 | 11 | /// Run a pre-configured `Command` so that its stdout & stderr are 12 | /// both forwarded live to the parent’s stdout/stderr and also captured. 
13 | pub fn run_and_capture(mut cmd: Command) -> io::Result { 14 | let mut child = cmd.stdout(Stdio::piped()).stderr(Stdio::piped()).spawn()?; 15 | 16 | let mut child_stdout = child.stdout.take().unwrap(); 17 | let mut child_stderr = child.stderr.take().unwrap(); 18 | 19 | let stdout_buf = std::sync::Arc::new(std::sync::Mutex::new(Vec::new())); 20 | let stderr_buf = std::sync::Arc::new(std::sync::Mutex::new(Vec::new())); 21 | 22 | let out_clone = stdout_buf.clone(); 23 | let out_handle = thread::spawn(move || { 24 | let mut buf = [0u8; 4096]; 25 | let mut out = io::stdout(); 26 | while let Ok(n) = child_stdout.read(&mut buf) { 27 | if n == 0 { 28 | break; 29 | } 30 | let chunk = &buf[..n]; 31 | out.write_all(chunk).ok(); 32 | out.flush().ok(); 33 | out_clone.lock().unwrap().extend_from_slice(chunk); 34 | } 35 | }); 36 | 37 | let err_clone = stderr_buf.clone(); 38 | let err_handle = thread::spawn(move || { 39 | let mut buf = [0u8; 4096]; 40 | let mut err = io::stderr(); 41 | while let Ok(n) = child_stderr.read(&mut buf) { 42 | if n == 0 { 43 | break; 44 | } 45 | let chunk = &buf[..n]; 46 | err.write_all(chunk).ok(); 47 | err.flush().ok(); 48 | err_clone.lock().unwrap().extend_from_slice(chunk); 49 | } 50 | }); 51 | 52 | let status = child.wait()?; 53 | out_handle.join().unwrap(); 54 | err_handle.join().unwrap(); 55 | 56 | let stdout = stdout_buf.lock().unwrap().clone(); 57 | let stderr = stderr_buf.lock().unwrap().clone(); 58 | 59 | Ok(Captured { 60 | stdout, 61 | stderr, 62 | status, 63 | }) 64 | } 65 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 |

wr

3 |
4 | 5 | A Rust workshop runner 6 | 7 |
8 |
9 | 10 | 11 | Crates.io version 13 | 14 | 15 | 16 | Download 18 | 19 |
20 |
21 | 22 |
23 | 24 | ![demo](demo.gif) 25 | 26 | `wr` is a CLI to drive test-driven workshops written in Rust. 27 | It is designed to be used in conjunction with a workshop repository, which contains a series of exercises to be solved 28 | by the workshop participants. 29 | 30 | > This workshop runner has been developed by [Mainmatter](https://mainmatter.com/rust-consulting/) to support 31 | > our [hands-on Rust workshops](https://mainmatter.com/services/workshops/rust/). 32 | > Check out our [landing page](https://mainmatter.com/rust-consulting/) if you're looking for Rust consulting or 33 | > training! 34 | 35 |
36 | 37 | ## How to install 38 | 39 | Check out the instructions on the [release page](https://mainmatter.github.io/rust-workshop-runner/). 40 |
41 | 42 | ## How it works 43 | 44 | > What I cannot create, I do not understand. 45 | > 46 | > Richard Feynman 47 | 48 | A test-driven workshop is structured as a series of exercises. 49 | Each exercise is a Rust project with a set of tests that verify the correctness of the solution. 50 | 51 | `wr` will run the tests for the current exercise and, if they pass, allow you to move on to the next exercise while 52 | keeping track of what you have solved so far. 53 | 54 | You can see it in action in the [rust-telemetry-workshop](https://github.com/mainmatter/rust-telemetry-workshop). 55 | 56 | ## Installation 57 | 58 | ```bash 59 | cargo install --locked workshop-runner 60 | ``` 61 | 62 | Check that it has been installed correctly: 63 | 64 | ```bash 65 | wr --help 66 | ``` 67 | 68 | ## Usage 69 | 70 | Run 71 | 72 | ```bash 73 | wr 74 | ``` 75 | 76 | from the top-level folder of a workshop repository to verify the solution for the current exercise 77 | and move forward in the workshop. 78 | 79 | You can also navigate to a specific exercise and then run `wr check` from inside its directory 80 | to verify its solution, regardless of what the current exercise is. 81 | 82 | ### Continuous checking 83 | 84 | You can combine `wr` with [`cargo-watch`](https://crates.io/crates/cargo-watch) to re-check your solution every time you 85 | make a change 86 | to the code: 87 | 88 | ```bash 89 | cargo watch -- wr 90 | ``` 91 | 92 | ## Folder structure 93 | 94 | `wr` expects the following structure for the workshop repository: 95 | 96 | ``` 97 | . 98 | ├── exercises 99 | │ ├── 00_ 100 | │ │ ├── 00_ 101 | │ │ │ .. 102 | │ │ ├── 0n_ 103 | │ │ .. 104 | │ ├── 0n_ 105 | │ │ ├── 00_ 106 | │ │ │ .. 107 | │ │ ├── 0n_ 108 | ``` 109 | 110 | Each `xx_` folder must be a Rust project with its own `Cargo.toml` file. 
111 | 112 | You can choose a different top-level folder name by creating a top-level `.wr.toml` file with the following content: 113 | 114 | ```toml 115 | exercises_dir = "my-top-level-folder" 116 | ``` 117 | 118 | You can refer to [rust-telemetry-workshop](https://github.com/mainmatter/rust-telemetry-workshop) as an example. 119 | -------------------------------------------------------------------------------- /.github/workflows/web.yml: -------------------------------------------------------------------------------- 1 | # Workflow to build your docs with oranda (and mdbook) 2 | # and deploy them to Github Pages 3 | name: Web 4 | 5 | # We're going to push to the gh-pages branch, so we need that permission 6 | permissions: 7 | contents: write 8 | 9 | # What situations do we want to build docs in? 10 | # All of these work independently and can be removed / commented out 11 | # if you don't want oranda/mdbook running in that situation 12 | on: 13 | # Check that a PR didn't break docs! 14 | # 15 | # Note that the "Deploy to Github Pages" step won't run in this mode, 16 | # so this won't have any side-effects. But it will tell you if a PR 17 | # completely broke oranda/mdbook. Sadly we don't provide previews (yet)! 18 | pull_request: 19 | 20 | # Whenever something gets pushed to main, update the docs! 21 | # This is great for getting docs changes live without cutting a full release. 22 | # 23 | # Note that if you're using cargo-dist, this will "race" the Release workflow 24 | # that actually builds the Github Release that oranda tries to read (and 25 | # this will almost certainly complete first). As a result you will publish 26 | # docs for the latest commit but the oranda landing page won't know about 27 | # the latest release. The workflow_run trigger below will properly wait for 28 | # cargo-dist, and so this half-published state will only last for ~10 minutes. 
29 | # 30 | # If you only want docs to update with releases, disable this, or change it to 31 | # a "release" branch. You can, of course, also manually trigger a workflow run 32 | # when you want the docs to update. 33 | push: 34 | branches: 35 | - main 36 | 37 | # Whenever a workflow called "Release" completes, update the docs! 38 | # 39 | # If you're using cargo-dist, this is recommended, as it will ensure that 40 | # oranda always sees the latest release right when it's available. Note 41 | # however that Github's UI is wonky when you use workflow_run, and won't 42 | # show this workflow as part of any commit. You have to go to the "actions" 43 | # tab for your repo to see this one running (the gh-pages deploy will also 44 | # only show up there). 45 | workflow_run: 46 | workflows: ["Release"] 47 | types: 48 | - completed 49 | 50 | # Alright, let's do it! 51 | jobs: 52 | web: 53 | name: Build and deploy site and docs 54 | runs-on: ubuntu-latest 55 | steps: 56 | # Setup 57 | - uses: actions/checkout@v3 58 | with: 59 | fetch-depth: 0 60 | - uses: dtolnay/rust-toolchain@stable 61 | - uses: swatinem/rust-cache@v2 62 | 63 | # If you use any mdbook plugins, here's the place to install them! 64 | 65 | # Install and run oranda (and mdbook)! 66 | # 67 | # This will write all output to ./public/ (including copying mdbook's output to there). 68 | - name: Install and run oranda 69 | run: | 70 | curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/oranda/releases/download/v0.6.1/oranda-installer.sh | sh 71 | oranda build 72 | 73 | - name: Add demo git 74 | run: | 75 | cp demo.gif public/demo.gif 76 | 77 | # Deploy to our gh-pages branch (creating it if it doesn't exist). 78 | # The "public" dir that oranda made above will become the root dir 79 | # of this branch. 80 | # 81 | # Note that once the gh-pages branch exists, you must 82 | # go into repo's settings > pages and set "deploy from branch: gh-pages". 83 | # The other defaults work fine. 
84 | - name: Deploy to Github Pages 85 | uses: JamesIves/github-pages-deploy-action@v4.4.1 86 | # ONLY if we're on main (so no PRs or feature branches allowed!) 87 | if: ${{ github.ref == 'refs/heads/main' }} 88 | with: 89 | branch: gh-pages 90 | # Gotta tell the action where to find oranda's output 91 | folder: public 92 | token: ${{ secrets.GITHUB_TOKEN }} 93 | single-commit: true 94 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist 2 | # 3 | # Copyright 2022-2024, axodotdev 4 | # SPDX-License-Identifier: MIT or Apache-2.0 5 | # 6 | # CI that: 7 | # 8 | # * checks for a Git Tag that looks like a release 9 | # * builds artifacts with dist (archives, installers, hashes) 10 | # * uploads those artifacts to temporary workflow zip 11 | # * on success, uploads the artifacts to a GitHub Release 12 | # 13 | # Note that the GitHub Release will be created with a generated 14 | # title/body based on your changelogs. 15 | 16 | name: Release 17 | permissions: 18 | "contents": "write" 19 | 20 | # This task will run whenever you push a git tag that looks like a version 21 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. 
22 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where 23 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION 24 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch). 25 | # 26 | # If PACKAGE_NAME is specified, then the announcement will be for that 27 | # package (erroring out if it doesn't have the given version or isn't dist-able). 28 | # 29 | # If PACKAGE_NAME isn't specified, then the announcement will be for all 30 | # (dist-able) packages in the workspace with that version (this mode is 31 | # intended for workspaces with only one dist-able package, or with all dist-able 32 | # packages versioned/released in lockstep). 33 | # 34 | # If you push multiple tags at once, separate instances of this workflow will 35 | # spin up, creating an independent announcement for each one. However, GitHub 36 | # will hard limit this to 3 tags per commit, as it will assume more tags is a 37 | # mistake. 38 | # 39 | # If there's a prerelease-style suffix to the version, then the release(s) 40 | # will be marked as a prerelease. 41 | on: 42 | pull_request: 43 | push: 44 | tags: 45 | - '**[0-9]+.[0-9]+.[0-9]+*' 46 | 47 | jobs: 48 | # Run 'dist plan' (or host) to determine what tasks we need to do 49 | plan: 50 | runs-on: "ubuntu-22.04" 51 | outputs: 52 | val: ${{ steps.plan.outputs.manifest }} 53 | tag: ${{ !github.event.pull_request && github.ref_name || '' }} 54 | tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} 55 | publishing: ${{ !github.event.pull_request }} 56 | env: 57 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | steps: 59 | - uses: actions/checkout@v4 60 | with: 61 | persist-credentials: false 62 | submodules: recursive 63 | - name: Install dist 64 | # we specify bash to get pipefail; it guards against the `curl` command 65 | # failing. 
otherwise `sh` won't catch that `curl` returned non-0 66 | shell: bash 67 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh" 68 | - name: Cache dist 69 | uses: actions/upload-artifact@v4 70 | with: 71 | name: cargo-dist-cache 72 | path: ~/.cargo/bin/dist 73 | # sure would be cool if github gave us proper conditionals... 74 | # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible 75 | # functionality based on whether this is a pull_request, and whether it's from a fork. 76 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 77 | # but also really annoying to build CI around when it needs secrets to work right.) 78 | - id: plan 79 | run: | 80 | dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json 81 | echo "dist ran successfully" 82 | cat plan-dist-manifest.json 83 | echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" 84 | - name: "Upload dist-manifest.json" 85 | uses: actions/upload-artifact@v4 86 | with: 87 | name: artifacts-plan-dist-manifest 88 | path: plan-dist-manifest.json 89 | 90 | # Build and packages all the platform-specific things 91 | build-local-artifacts: 92 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 93 | # Let the initial task tell us to not run (currently very blunt) 94 | needs: 95 | - plan 96 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 97 | strategy: 98 | fail-fast: false 99 | # Target platforms/runners are computed by dist in create-release. 
100 | # Each member of the matrix has the following arguments: 101 | # 102 | # - runner: the github runner 103 | # - dist-args: cli flags to pass to dist 104 | # - install-dist: expression to run to install dist on the runner 105 | # 106 | # Typically there will be: 107 | # - 1 "global" task that builds universal installers 108 | # - N "local" tasks that build each platform's binaries and platform-specific installers 109 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 110 | runs-on: ${{ matrix.runner }} 111 | container: ${{ matrix.container && matrix.container.image || null }} 112 | env: 113 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 114 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 115 | steps: 116 | - name: enable windows longpaths 117 | run: | 118 | git config --global core.longpaths true 119 | - uses: actions/checkout@v4 120 | with: 121 | persist-credentials: false 122 | submodules: recursive 123 | - name: Install Rust non-interactively if not already installed 124 | if: ${{ matrix.container }} 125 | run: | 126 | if ! 
command -v cargo > /dev/null 2>&1; then 127 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y 128 | echo "$HOME/.cargo/bin" >> $GITHUB_PATH 129 | fi 130 | - name: Install dist 131 | run: ${{ matrix.install_dist.run }} 132 | # Get the dist-manifest 133 | - name: Fetch local artifacts 134 | uses: actions/download-artifact@v4 135 | with: 136 | pattern: artifacts-* 137 | path: target/distrib/ 138 | merge-multiple: true 139 | - name: Install dependencies 140 | run: | 141 | ${{ matrix.packages_install }} 142 | - name: Build artifacts 143 | run: | 144 | # Actually do builds and make zips and whatnot 145 | dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 146 | echo "dist ran successfully" 147 | - id: cargo-dist 148 | name: Post-build 149 | # We force bash here just because github makes it really hard to get values up 150 | # to "real" actions without writing to env-vars, and writing to env-vars has 151 | # inconsistent syntax between shell and powershell. 
152 | shell: bash 153 | run: | 154 | # Parse out what we just built and upload it to scratch storage 155 | echo "paths<<EOF" >> "$GITHUB_OUTPUT" 156 | dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT" 157 | echo "EOF" >> "$GITHUB_OUTPUT" 158 | 159 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 160 | - name: "Upload artifacts" 161 | uses: actions/upload-artifact@v4 162 | with: 163 | name: artifacts-build-local-${{ join(matrix.targets, '_') }} 164 | path: | 165 | ${{ steps.cargo-dist.outputs.paths }} 166 | ${{ env.BUILD_MANIFEST_NAME }} 167 | 168 | # Build and package all the platform-agnostic(ish) things 169 | build-global-artifacts: 170 | needs: 171 | - plan 172 | - build-local-artifacts 173 | runs-on: "ubuntu-22.04" 174 | env: 175 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 176 | BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json 177 | steps: 178 | - uses: actions/checkout@v4 179 | with: 180 | persist-credentials: false 181 | submodules: recursive 182 | - name: Install cached dist 183 | uses: actions/download-artifact@v4 184 | with: 185 | name: cargo-dist-cache 186 | path: ~/.cargo/bin/ 187 | - run: chmod +x ~/.cargo/bin/dist 188 | # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) 189 | - name: Fetch local artifacts 190 | uses: actions/download-artifact@v4 191 | with: 192 | pattern: artifacts-* 193 | path: target/distrib/ 194 | merge-multiple: true 195 | - id: cargo-dist 196 | shell: bash 197 | run: | 198 | dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json 199 | echo "dist ran successfully" 200 | 201 | # Parse out what we just built and upload it to scratch storage 202 | echo "paths<<EOF" >> "$GITHUB_OUTPUT" 203 | jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" 204 | echo "EOF" >> "$GITHUB_OUTPUT" 205 | 206 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 207 | - name: "Upload artifacts" 208 | uses: actions/upload-artifact@v4 209 | with: 210 | name: artifacts-build-global 211 | path: | 212 | ${{ steps.cargo-dist.outputs.paths }} 213 | ${{ env.BUILD_MANIFEST_NAME }} 214 | # Determines if we should publish/announce 215 | host: 216 | needs: 217 | - plan 218 | - build-local-artifacts 219 | - build-global-artifacts 220 | # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) 221 | if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} 222 | env: 223 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 224 | runs-on: "ubuntu-22.04" 225 | outputs: 226 | val: ${{ steps.host.outputs.manifest }} 227 | steps: 228 | - uses: actions/checkout@v4 229 | with: 230 | persist-credentials: false 231 | submodules: recursive 232 | - name: Install cached dist 233 | uses: actions/download-artifact@v4 234 | with: 235 | name: cargo-dist-cache 236 | path: ~/.cargo/bin/ 237 | - run: chmod +x ~/.cargo/bin/dist 238 | # Fetch artifacts from scratch-storage 239 | - name: Fetch artifacts 240 | uses: actions/download-artifact@v4 241 | with: 242 | 
pattern: artifacts-* 243 | path: target/distrib/ 244 | merge-multiple: true 245 | - id: host 246 | shell: bash 247 | run: | 248 | dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json 249 | echo "artifacts uploaded and released successfully" 250 | cat dist-manifest.json 251 | echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" 252 | - name: "Upload dist-manifest.json" 253 | uses: actions/upload-artifact@v4 254 | with: 255 | # Overwrite the previous copy 256 | name: artifacts-dist-manifest 257 | path: dist-manifest.json 258 | # Create a GitHub Release while uploading all files to it 259 | - name: "Download GitHub Artifacts" 260 | uses: actions/download-artifact@v4 261 | with: 262 | pattern: artifacts-* 263 | path: artifacts 264 | merge-multiple: true 265 | - name: Cleanup 266 | run: | 267 | # Remove the granular manifests 268 | rm -f artifacts/*-dist-manifest.json 269 | - name: Create GitHub Release 270 | env: 271 | PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" 272 | ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" 273 | ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" 274 | RELEASE_COMMIT: "${{ github.sha }}" 275 | run: | 276 | # Write and read notes from a file to avoid quoting breaking things 277 | echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt 278 | 279 | gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* 280 | 281 | announce: 282 | needs: 283 | - plan 284 | - host 285 | # use "always() && ..." to allow us to wait for all publish jobs while 286 | # still allowing individual publish jobs to skip themselves (for prereleases). 287 | # "host" however must run to completion, no skipping allowed! 
288 | if: ${{ always() && needs.host.result == 'success' }} 289 | runs-on: "ubuntu-22.04" 290 | env: 291 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 292 | steps: 293 | - uses: actions/checkout@v4 294 | with: 295 | persist-credentials: false 296 | submodules: recursive 297 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, Subcommand}; 2 | use fs_err::PathExt; 3 | use read_input::prelude::*; 4 | use std::{ffi::OsString, path::Path}; 5 | use wr::{ 6 | ExerciseCollection, ExerciseDefinition, ExercisesConfig, OpenedExercise, Verification, 7 | tee_helper::run_and_capture, 8 | }; 9 | use yansi::Paint; 10 | 11 | /// A small CLI to manage test-driven workshops and tutorials in Rust. 12 | /// 13 | /// Each exercise comes with a set of associated tests. 14 | /// A suite of exercises is called "collection". 15 | /// 16 | /// Invoking `wr` runs tests for all the exercises you have opened so far in a collection 17 | /// to check if your solutions are correct. 18 | /// If everything runs smoothly, you will be asked if you want to move forward to the next exercise. 19 | #[derive(Parser)] 20 | #[command(author, version, about, long_about = None)] 21 | pub struct Command { 22 | #[arg(long)] 23 | /// Compile and run tests for all opened exercises, even if they have already succeeded 24 | /// in a past run. 25 | pub recheck: bool, 26 | 27 | #[arg(long)] 28 | /// By default, `wr` will run `cargo build` in quiet mode and it won't show you the logs 29 | /// coming from the build process. 30 | /// With this flag, those logs (and the progress bar) will be displayed. 31 | pub verbose: bool, 32 | 33 | #[arg(long)] 34 | /// By default, `wr` will prompt you to open the next exercise if all the currently opened 35 | /// exercises passed their tests. 
36 | /// With this flag, `wr` will automatically open the next exercise if all the currently opened 37 | /// exercises passed their tests. It'll then run the tests for the newly opened exercise. 38 | /// If they pass, it'll open the next one, and so on. 39 | pub keep_going: bool, 40 | 41 | #[command(subcommand)] 42 | command: Option<Commands>, 43 | } 44 | 45 | #[derive(Subcommand)] 46 | pub enum Commands { 47 | /// Open a specific exercise. 48 | /// 49 | /// You can either provide the full name of the chapter and exercise, or only their number. 50 | /// 51 | /// E.g. `wr open --chapter 01_structured_logging --exercise 00_intro` will open 52 | /// the exercise located at `01_structured_logging/00_intro`. 53 | /// The same exercise can be opened with `wr open --chapter 1 --exercise 0`. 54 | Open { 55 | /// The name of the chapter containing the exercise, or its number. 56 | /// 57 | /// E.g. `--chapter 01_structured_logging` and `--chapter 1` are equivalent. 58 | #[arg(long)] 59 | chapter: String, 60 | /// The name of the exercise, or its number within the chapter it belongs to. 61 | /// 62 | /// E.g. `--exercise 00_intro` and `--exercise 0` are equivalent. 63 | #[arg(long)] 64 | exercise: String, 65 | }, 66 | /// Run the tests for the exercise in the current directory. 67 | /// It errors if the current directory is not an exercise. 68 | Check, 69 | } 70 | 71 | fn main() -> Result<(), anyhow::Error> { 72 | let command = Command::parse(); 73 | // Enable ANSI colour support on Windows, if it's supported. 74 | // Disable it entirely otherwise. 
75 | if !use_ansi_colours() { 76 | Paint::disable(); 77 | } 78 | let configuration = ExercisesConfig::load()?; 79 | let verbose = command.verbose; 80 | let mut exercises = ExerciseCollection::new(configuration.exercises_dir().to_path_buf())?; 81 | 82 | if let Some(command) = command.command { 83 | match command { 84 | Commands::Open { chapter, exercise } => { 85 | enum Selector { 86 | FullName(String), 87 | Number(u16), 88 | } 89 | 90 | impl Selector { 91 | fn new(s: String) -> Self { 92 | match s.parse::() { 93 | Ok(number) => Selector::Number(number), 94 | Err(_) => Selector::FullName(s), 95 | } 96 | } 97 | 98 | fn matches(&self, name: &str, number: u16) -> bool { 99 | match self { 100 | Selector::FullName(s) => s == name, 101 | Selector::Number(n) => *n == number, 102 | } 103 | } 104 | } 105 | 106 | impl std::fmt::Display for Selector { 107 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 108 | match self { 109 | Selector::FullName(s) => write!(f, "{}", s), 110 | Selector::Number(n) => write!(f, "{}", n), 111 | } 112 | } 113 | } 114 | 115 | let chapter_selector = Selector::new(chapter); 116 | let exercise_selector = Selector::new(exercise); 117 | 118 | let exercise = exercises.iter().find(|k| { 119 | chapter_selector.matches(&k.chapter(), k.chapter_number()) 120 | && exercise_selector.matches(&k.exercise(), k.exercise_number()) 121 | }).ok_or_else(|| { 122 | anyhow::anyhow!("There is no exercise matching `--chapter {chapter_selector} -- exercise {exercise_selector}`") 123 | })?.to_owned(); 124 | 125 | exercises.open(&exercise)?; 126 | print_opened_message(&exercise, exercises.exercises_dir()); 127 | } 128 | Commands::Check => { 129 | let current_dir = std::env::current_dir()?.fs_err_canonicalize()?; 130 | let definition = exercises 131 | .iter() 132 | .find(|k| { 133 | let manifest_folder = k 134 | .manifest_folder_path(exercises.exercises_dir()) 135 | .fs_err_canonicalize() 136 | .expect("Failed to canonicalize manifest folder path"); 137 
| manifest_folder == current_dir 138 | }) 139 | .ok_or_else(|| anyhow::anyhow!("The current directory is not an exercise"))?; 140 | if let TestOutcome::Failure { command, details } = verify( 141 | &exercises, 142 | &definition, 143 | configuration.verification(), 144 | configuration.skip_build, 145 | verbose, 146 | )? { 147 | print_failure_message(&command, &details); 148 | std::process::exit(1); 149 | } 150 | } 151 | } 152 | return Ok(()); 153 | } 154 | 155 | // If no command was specified, we verify the user's progress on the workshop-runner that have already 156 | // been opened. 157 | if let TestOutcome::Failure { command, details } = seek_the_path( 158 | &mut exercises, 159 | command.recheck, 160 | configuration.verification(), 161 | configuration.skip_build, 162 | verbose, 163 | )? { 164 | print_failure_message(&command, &details); 165 | std::process::exit(1); 166 | }; 167 | 168 | // If all the currently opened workshop-runner passed their checks, we open the next one (if it exists). 169 | while let Some(next_exercise) = exercises.next()? { 170 | if command.keep_going { 171 | let next_exercise = exercises 172 | .open_next() 173 | .expect("Failed to open the next exercise"); 174 | let exercise_outcome = verify( 175 | &exercises, 176 | &next_exercise, 177 | configuration.verification(), 178 | configuration.skip_build, 179 | command.verbose, 180 | )?; 181 | if let TestOutcome::Failure { command, details } = exercise_outcome { 182 | print_failure_message(&command, &details); 183 | std::process::exit(1); 184 | }; 185 | continue; 186 | } else { 187 | println!( 188 | "\t{}\n", 189 | info_style().paint( 190 | "Eternity lies ahead of us, and behind. Your path is not yet finished. 🍂" 191 | ) 192 | ); 193 | 194 | let open_next = input::() 195 | .repeat_msg(format!( 196 | "Do you want to open the next exercise, {}? 
[y/n] ", 196 | next_exercise 197 | )) 198 | 199 | .err("Please answer either yes or no.") 200 | .add_test(|s| parse_bool(s).is_some()) 201 | .get(); 202 | // We can safely unwrap here because we have already validated the input. 203 | let open_next = parse_bool(&open_next).unwrap(); 204 | 205 | if open_next { 206 | let next_exercise = exercises 207 | .open_next() 208 | .expect("Failed to open the next exercise"); 209 | print_opened_message(&next_exercise, exercises.exercises_dir()); 210 | } 211 | return Ok(()); 212 | } 213 | } 214 | println!( 215 | "{}\n\t{}\n", 216 | success_style().paint("\n\tThere will be no more tasks."), 217 | info_style().paint("What is the sound of one hand clapping (for you)? 🌟") 218 | ); 219 | Ok(()) 220 | } 221 | 222 | fn parse_bool(s: &str) -> Option<bool> { 223 | match s.to_ascii_lowercase().as_str() { 224 | "yes" | "y" => Some(true), 225 | "no" | "n" => Some(false), 226 | _ => None, 227 | } 228 | } 229 | 230 | fn seek_the_path( 231 | exercises: &mut ExerciseCollection, 232 | recheck: bool, 233 | verification: &[Verification], 234 | skip_build: bool, 235 | verbose: bool, 236 | ) -> Result<TestOutcome, anyhow::Error> { 237 | println!(" \n\n{}", info_style().dimmed().paint("Running tests...\n")); 238 | for exercise in exercises.opened()? 
{ 239 | let OpenedExercise { definition, solved } = &exercise; 240 | if !exercise.definition.exists(exercises.exercises_dir()) { 241 | exercises.close(&definition)?; 242 | continue; 243 | } 244 | if *solved && !recheck { 245 | println!( 246 | "{}", 247 | info_style().paint(format!("\t⏩ {} (Not rechecked)", definition)) 248 | ); 249 | continue; 250 | } 251 | let exercise_outcome = verify(exercises, &definition, verification, skip_build, verbose)?; 252 | if let TestOutcome::Failure { command, details } = exercise_outcome { 253 | return Ok(TestOutcome::Failure { command, details }); 254 | } 255 | } 256 | Ok(TestOutcome::Success) 257 | } 258 | 259 | fn verify( 260 | exercises: &ExerciseCollection, 261 | definition: &ExerciseDefinition, 262 | verification: &[Verification], 263 | skip_build: bool, 264 | verbose: bool, 265 | ) -> Result { 266 | let exercise_config = definition.config(exercises.exercises_dir())?; 267 | // Exercise-specific config takes precedence over the global one, if specified. 268 | let verification = exercise_config 269 | .as_ref() 270 | .map(|c| c.verification.as_slice()) 271 | .unwrap_or(verification); 272 | let exercise_outcome = _verify( 273 | &definition.manifest_path(exercises.exercises_dir()), 274 | verification, 275 | skip_build, 276 | verbose, 277 | ); 278 | match &exercise_outcome { 279 | TestOutcome::Success => { 280 | println!("{}", success_style().paint(format!("\t🚀 {}", definition))); 281 | exercises.mark_as_solved(&definition)?; 282 | } 283 | TestOutcome::Failure { .. } => { 284 | println!("{}", failure_style().paint(format!("\t❌ {}", definition))); 285 | exercises.mark_as_unsolved(&definition)?; 286 | } 287 | } 288 | Ok(exercise_outcome) 289 | } 290 | 291 | fn _verify( 292 | manifest_path: &Path, 293 | verification: &[Verification], 294 | skip_build: bool, 295 | verbose: bool, 296 | ) -> TestOutcome { 297 | // Tell cargo to return colored output, unless we are on Windows and the terminal 298 | // doesn't support it. 
299 | let color_option = if use_ansi_colours() { 300 | "always" 301 | } else { 302 | "never" 303 | }; 304 | 305 | // `cargo build` first 306 | if !skip_build { 307 | let mut cmd = std::process::Command::new("cargo"); 308 | cmd.arg("build"); 309 | cmd.arg("--manifest-path"); 310 | cmd.arg(manifest_path); 311 | cmd.arg("--all-targets"); 312 | cmd.arg("--color"); 313 | cmd.arg(color_option); 314 | if !verbose { 315 | cmd.arg("-q"); 316 | } 317 | 318 | if verbose { 319 | cmd.stdout(std::process::Stdio::inherit()) 320 | .stderr(std::process::Stdio::inherit()); 321 | } 322 | 323 | let output = cmd.output().expect("Failed to build the project"); 324 | 325 | if !output.status.success() { 326 | return TestOutcome::Failure { 327 | command: format!("{:?}", cmd), 328 | details: [output.stderr, output.stdout].concat(), 329 | }; 330 | } 331 | } 332 | 333 | // Now we run the verification command. 334 | { 335 | let mut verification_commands: Vec<_> = verification 336 | .iter() 337 | .map(|v| { 338 | let mut cmd = std::process::Command::new(&v.command); 339 | cmd.args(&v.args); 340 | cmd 341 | }) 342 | .collect(); 343 | if verification_commands.is_empty() { 344 | let mut args: Vec = 345 | vec!["test".into(), "--color".into(), color_option.into()]; 346 | 347 | if !verbose { 348 | args.push("-q".into()); 349 | } 350 | 351 | let mut cmd = std::process::Command::new("cargo"); 352 | cmd.args(args); 353 | verification_commands.push(cmd); 354 | } 355 | verification_commands.iter_mut().for_each(|cmd| { 356 | // We run verification commands from the exercise's directory. 
357 | cmd.current_dir( 358 | manifest_path 359 | .parent() 360 | .expect("Failed to get parent dir for manifest"), 361 | ); 362 | }); 363 | for mut verification_cmd in verification_commands { 364 | let error_msg = format!("Failed to run: `{:?}`", verification_cmd); 365 | let command_dbg = format!("{:?}", verification_cmd); 366 | let (status, stderr, stdout) = if verbose { 367 | let captured = run_and_capture(verification_cmd).expect(&error_msg); 368 | (captured.status, captured.stderr, captured.stdout) 369 | } else { 370 | let output = verification_cmd.output().expect(&error_msg); 371 | (output.status, output.stderr, output.stdout) 372 | }; 373 | 374 | if !status.success() { 375 | return TestOutcome::Failure { 376 | command: command_dbg, 377 | details: [stderr, stdout].concat(), 378 | }; 379 | } 380 | } 381 | } 382 | 383 | TestOutcome::Success 384 | } 385 | 386 | #[derive(PartialEq)] 387 | enum TestOutcome { 388 | Success, 389 | Failure { command: String, details: Vec }, 390 | } 391 | 392 | fn print_opened_message(exercise: &ExerciseDefinition, exercises_dir: &Path) { 393 | println!( 394 | "{} {}", 395 | next_style().paint("\n\tAhead of you lies"), 396 | next_style().bold().paint(format!("{exercise}")), 397 | ); 398 | let relative_path = exercise.manifest_folder_path(exercises_dir); 399 | let open_msg = format!( 400 | "\n\tOpen {:?} in your editor and get started!\n\tRun `wr` again to compile the exercise and execute its tests.", 401 | relative_path 402 | ); 403 | println!("{}", next_style().paint(open_msg)); 404 | } 405 | 406 | fn print_failure_message(command: &str, details: &[u8]) { 407 | println!( 408 | "\n\t{}\n\nFailed to run:\n\t{}\nOutput:\n{}\n", 409 | info_style() 410 | .paint("Meditate on your approach and return. 
Mountains are merely mountains.\n\n"), 411 | cargo_style().paint(&command), 412 | cargo_style().paint(textwrap::indent( 413 | &String::from_utf8_lossy(details).to_string(), 414 | "\t" 415 | )) 416 | ); 417 | } 418 | 419 | pub fn info_style() -> yansi::Style { 420 | yansi::Style::new(yansi::Color::Default) 421 | } 422 | pub fn cargo_style() -> yansi::Style { 423 | yansi::Style::new(yansi::Color::Default).dimmed() 424 | } 425 | pub fn next_style() -> yansi::Style { 426 | yansi::Style::new(yansi::Color::Yellow) 427 | } 428 | pub fn success_style() -> yansi::Style { 429 | yansi::Style::new(yansi::Color::Green) 430 | } 431 | pub fn failure_style() -> yansi::Style { 432 | yansi::Style::new(yansi::Color::Red) 433 | } 434 | 435 | /// Determine if our terminal output should leverage colors via ANSI escape codes. 436 | pub fn use_ansi_colours() -> bool { 437 | if cfg!(target_os = "windows") { 438 | Paint::enable_windows_ascii() 439 | } else { 440 | true 441 | } 442 | } 443 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, anyhow, bail}; 2 | use fs_err::read_dir; 3 | use regex::Regex; 4 | use rusqlite::{Connection, params}; 5 | use std::{ 6 | cmp::Ordering, 7 | collections::BTreeSet, 8 | ffi::OsStr, 9 | fmt::Formatter, 10 | path::{Path, PathBuf}, 11 | process::Command, 12 | }; 13 | 14 | pub mod tee_helper; 15 | 16 | #[derive(serde::Deserialize, Debug)] 17 | /// The configuration for the current collection of exercises. 18 | pub struct ExercisesConfig { 19 | /// The path to the directory containing the exercises, relative 20 | /// to the root of the repository. 21 | #[serde(default = "default_exercise_dir")] 22 | exercises_dir: PathBuf, 23 | /// The command that should be run to verify that the workshop-runner is working as expected. 
24 | #[serde(default)] 25 | verification: Vec<Verification>, 26 | /// Don't try to build the project before running the verification command. 27 | #[serde(default)] 28 | pub skip_build: bool, 29 | } 30 | 31 | #[derive(serde::Deserialize, Debug)] 32 | /// The configuration for a specific exercise. 33 | pub struct ExerciseConfig { 34 | /// The commands that should be run to verify this exercise. 35 | /// It overrides the verification command specified in the collection configuration, if any. 36 | #[serde(default)] 37 | pub verification: Vec<Verification>, 38 | } 39 | 40 | #[derive(Debug, serde::Deserialize)] 41 | pub struct Verification { 42 | /// The command that should be run to verify that the workshop-runner is working as expected. 43 | pub command: String, 44 | /// The arguments that should be passed to the verification command. 45 | #[serde(default)] 46 | pub args: Vec<String>, 47 | } 48 | 49 | fn default_exercise_dir() -> PathBuf { 50 | PathBuf::from("exercises") 51 | } 52 | 53 | impl ExercisesConfig { 54 | pub fn load() -> Result<Self, anyhow::Error> { 55 | let root_path = get_git_repository_root_dir() 56 | .context("Failed to determine the root path of the current `git` repository")?; 57 | let exercises_config_path = root_path.join(".wr.toml"); 58 | let exercises_config = fs_err::read_to_string(&exercises_config_path).context( 59 | "Failed to read the configuration for the current collection of workshop-runner", 60 | )?; 61 | let mut exercises_config: ExercisesConfig = toml::from_str(&exercises_config).with_context(|| { 62 | format!( 63 | "Failed to parse the configuration at `{}` for the current collection of workshop-runner", 64 | exercises_config_path.to_string_lossy() 65 | ) 66 | })?; 67 | // The path to the exercises directory is relative to the root of the repository. 68 | exercises_config.exercises_dir = root_path.join(&exercises_config.exercises_dir); 69 | Ok(exercises_config) 70 | } 71 | 72 | /// The path to the directory containing the exercises 73 | /// for the current collection of workshop-runner. 
74 | pub fn exercises_dir(&self) -> &Path { 75 | &self.exercises_dir 76 | } 77 | 78 | /// The command(s) that should be run to verify that exercises are correct. 79 | /// If empty, workshop-runner will use `cargo test` as default. 80 | pub fn verification(&self) -> &[Verification] { 81 | &self.verification 82 | } 83 | } 84 | 85 | /// Retrieve the path to the root directory of the current `git` repository. 86 | pub fn get_git_repository_root_dir() -> Result { 87 | let cmd = Command::new("git") 88 | .args(["rev-parse", "--show-cdup"]) 89 | .output() 90 | .context("Failed to run a `git` command (`git rev-parse --show-cdup`) to determine the root path of the current `git` repository")?; 91 | if cmd.status.success() { 92 | let path = String::from_utf8(cmd.stdout) 93 | .context("The root path of the current `git` repository is not valid UTF-8")?; 94 | Ok(path.trim().into()) 95 | } else { 96 | Err(anyhow!( 97 | "Failed to determine the root path of the current `git` repository" 98 | )) 99 | } 100 | } 101 | 102 | pub struct ExerciseCollection { 103 | exercises_dir: PathBuf, 104 | connection: Connection, 105 | exercises: BTreeSet, 106 | } 107 | 108 | impl ExerciseCollection { 109 | pub fn new(exercises_dir: PathBuf) -> Result { 110 | let chapters = read_dir(&exercises_dir) 111 | .context("Failed to read the exercises directory")? 
112 | .filter_map(|entry| { 113 | let Ok(entry) = entry else { 114 | return None; 115 | }; 116 | let Ok(file_type) = entry.file_type() else { 117 | return None; 118 | }; 119 | if file_type.is_dir() { 120 | Some(entry) 121 | } else { 122 | None 123 | } 124 | }); 125 | let exercises: BTreeSet = chapters 126 | .flat_map(|entry| { 127 | let chapter_name = entry.file_name(); 128 | read_dir(entry.path()).unwrap().map(move |f| { 129 | let exercise = f.unwrap(); 130 | (chapter_name.to_owned(), exercise.file_name()) 131 | }) 132 | }) 133 | .filter_map(|(c, k)| ExerciseDefinition::new(&c, &k).ok()) 134 | .collect(); 135 | 136 | let db_path = exercises_dir.join("progress.db"); 137 | // Open the database (or create it, if it doesn't exist yet). 138 | let connection = Connection::open(db_path) 139 | .context("Failed to create a SQLite database to track your progress")?; 140 | // Make sure all tables are initialised 141 | connection 142 | .execute( 143 | "CREATE TABLE IF NOT EXISTS open_exercises ( 144 | chapter TEXT NOT NULL, 145 | exercise TEXT NOT NULL, 146 | solved INTEGER NOT NULL, 147 | PRIMARY KEY (chapter, exercise) 148 | )", 149 | [], 150 | ) 151 | .context("Failed to initialise our SQLite database to track your progress")?; 152 | 153 | Ok(Self { 154 | connection, 155 | exercises_dir, 156 | exercises, 157 | }) 158 | } 159 | 160 | pub fn n_opened(&self) -> Result { 161 | let err_msg = "Failed to determine how many workshop-runner have been opened"; 162 | let mut stmt = self 163 | .connection 164 | .prepare("SELECT COUNT(*) FROM open_exercises") 165 | .context(err_msg)?; 166 | stmt.query_row([], |row| row.get(0)).context(err_msg) 167 | } 168 | 169 | /// Return an iterator over all the workshop-runner that have been opened. 170 | pub fn opened(&self) -> Result, anyhow::Error> { 171 | opened_exercises(&self.connection) 172 | } 173 | 174 | /// Return the next exercise that should be opened, if we are going through the workshop-runner 175 | /// in the expected order. 
176 | pub fn next(&mut self) -> Result, anyhow::Error> { 177 | let opened = opened_exercises(&self.connection)? 178 | .into_iter() 179 | .map(|e| e.definition) 180 | .collect(); 181 | let unsolved = self 182 | .exercises 183 | .difference(&opened) 184 | .cloned() 185 | .collect::>(); 186 | for next in unsolved { 187 | if next.exists(&self.exercises_dir) { 188 | return Ok(Some(next)); 189 | } else { 190 | self.close(&next)?; 191 | } 192 | } 193 | Ok(None) 194 | } 195 | 196 | /// Record in the database that an exercise was solved, so that it can be skipped next time. 197 | pub fn mark_as_solved(&self, exercise: &ExerciseDefinition) -> Result<(), anyhow::Error> { 198 | self.connection 199 | .execute( 200 | "UPDATE open_exercises SET solved = 1 WHERE chapter = ?1 AND exercise = ?2", 201 | params![exercise.chapter(), exercise.exercise(),], 202 | ) 203 | .context("Failed to mark exercise as solved")?; 204 | Ok(()) 205 | } 206 | 207 | /// Record in the database that an exercise was not solved, so that it won't be skipped next time. 208 | pub fn mark_as_unsolved(&self, exercise: &ExerciseDefinition) -> Result<(), anyhow::Error> { 209 | self.connection 210 | .execute( 211 | "UPDATE open_exercises SET solved = 0 WHERE chapter = ?1 AND exercise = ?2", 212 | params![exercise.chapter(), exercise.exercise(),], 213 | ) 214 | .context("Failed to mark exercise as unsolved")?; 215 | Ok(()) 216 | } 217 | 218 | /// Open a specific exercise. 219 | pub fn open(&mut self, exercise: &ExerciseDefinition) -> Result<(), anyhow::Error> { 220 | if !self.exercises.contains(exercise) { 221 | bail!("The exercise you are trying to open doesn't exist") 222 | } 223 | self.connection 224 | .execute( 225 | "INSERT OR IGNORE INTO open_exercises (chapter, exercise, solved) VALUES (?1, ?2, 0)", 226 | params![exercise.chapter(), exercise.exercise(),], 227 | ) 228 | .context("Failed to open the next exercise")?; 229 | Ok(()) 230 | } 231 | 232 | /// Close a specific exercise. 
233 | pub fn close(&mut self, exercise: &ExerciseDefinition) -> Result<(), anyhow::Error> { 234 | self.connection 235 | .execute( 236 | "DELETE FROM open_exercises WHERE chapter = ?1 AND exercise = ?2", 237 | params![exercise.chapter(), exercise.exercise(),], 238 | ) 239 | .context("Failed to close an exercise")?; 240 | Ok(()) 241 | } 242 | 243 | /// Open the next exercise, assuming we are going through the workshop-runner in order. 244 | pub fn open_next(&mut self) -> Result { 245 | let Some(next) = self.next()? else { 246 | bail!("There are no more exercises to open") 247 | }; 248 | self.open(&next)?; 249 | Ok(next) 250 | } 251 | 252 | /// The directory containing all the workshop chapters and workshop-runner. 253 | pub fn exercises_dir(&self) -> &Path { 254 | &self.exercises_dir 255 | } 256 | 257 | /// Iterate over the workshop-runner in the collection, in the order we expect them to be completed. 258 | /// It returns both opened and unopened workshop-runner. 259 | pub fn iter(&self) -> impl Iterator { 260 | self.exercises.iter() 261 | } 262 | } 263 | 264 | /// Return the set of all workshop-runner that have been opened. 265 | fn opened_exercises(connection: &Connection) -> Result, anyhow::Error> { 266 | let err_msg = "Failed to retrieve the list of exercises that you have already started"; 267 | let mut stmt = connection 268 | .prepare("SELECT chapter, exercise, solved FROM open_exercises") 269 | .context(err_msg)?; 270 | let opened_exercises = stmt 271 | .query_map([], |row| { 272 | let chapter = row.get_ref_unwrap(0).as_str().unwrap(); 273 | let exercise = row.get_ref_unwrap(1).as_str().unwrap(); 274 | let solved = row.get_ref_unwrap(2).as_i64().unwrap(); 275 | let solved = if solved == 0 { false } else { true }; 276 | let definition = ExerciseDefinition::new(chapter.as_ref(), exercise.as_ref()) 277 | .expect("An invalid exercise has been stored in the database"); 278 | Ok(OpenedExercise { definition, solved }) 279 | }) 280 | .context(err_msg)? 
281 | .collect::, _>>()?; 282 | Ok(opened_exercises) 283 | } 284 | 285 | #[derive(Clone, PartialEq, Eq)] 286 | pub struct ExerciseDefinition { 287 | chapter_name: String, 288 | chapter_number: u16, 289 | name: String, 290 | number: u16, 291 | } 292 | 293 | #[derive(Clone, PartialEq, Eq)] 294 | pub struct OpenedExercise { 295 | pub definition: ExerciseDefinition, 296 | pub solved: bool, 297 | } 298 | 299 | impl PartialOrd for OpenedExercise { 300 | fn partial_cmp(&self, other: &Self) -> Option { 301 | self.definition.partial_cmp(&other.definition) 302 | } 303 | } 304 | 305 | impl Ord for OpenedExercise { 306 | fn cmp(&self, other: &Self) -> Ordering { 307 | self.definition.cmp(&other.definition) 308 | } 309 | } 310 | 311 | impl PartialOrd for ExerciseDefinition { 312 | fn partial_cmp(&self, other: &Self) -> Option { 313 | let ord = self 314 | .chapter_number 315 | .cmp(&other.chapter_number) 316 | .then(self.number.cmp(&other.number)); 317 | Some(ord) 318 | } 319 | } 320 | 321 | impl Ord for ExerciseDefinition { 322 | fn cmp(&self, other: &Self) -> Ordering { 323 | self.partial_cmp(other).unwrap() 324 | } 325 | } 326 | 327 | impl PartialEq for ExerciseDefinition { 328 | fn eq(&self, other: &OpenedExercise) -> bool { 329 | self == &other.definition 330 | } 331 | } 332 | 333 | impl PartialOrd for ExerciseDefinition { 334 | fn partial_cmp(&self, other: &OpenedExercise) -> Option { 335 | self.partial_cmp(&other.definition) 336 | } 337 | } 338 | 339 | impl ExerciseDefinition { 340 | pub fn new(chapter_dir_name: &OsStr, exercise_dir_name: &OsStr) -> Result { 341 | fn parse(dir_name: &OsStr, type_: &str) -> Result<(String, u16), anyhow::Error> { 342 | // TODO: compile the regex only once. 
343 | let re = Regex::new(r"(?P\d{2})_(?P\w+)").unwrap(); 344 | 345 | let dir_name = dir_name.to_str().ok_or_else(|| { 346 | anyhow!( 347 | "The name of a {type_} must be valid UTF-8 text, but {:?} isn't", 348 | dir_name 349 | ) 350 | })?; 351 | match re.captures(&dir_name) { 352 | None => bail!("Failed to parse `{dir_name:?}` as a {type_} (_).",), 353 | Some(s) => { 354 | let name = s["name"].into(); 355 | let number = s["number"].parse().unwrap(); 356 | Ok((name, number)) 357 | } 358 | } 359 | } 360 | 361 | let (name, number) = parse(exercise_dir_name, "exercise")?; 362 | let (chapter_name, chapter_number) = parse(chapter_dir_name, "chapter")?; 363 | 364 | Ok(ExerciseDefinition { 365 | chapter_name, 366 | chapter_number, 367 | name, 368 | number, 369 | }) 370 | } 371 | 372 | /// The path to the `Cargo.toml` file of the current exercise. 373 | pub fn manifest_path(&self, exercises_dir: &Path) -> PathBuf { 374 | self.manifest_folder_path(exercises_dir).join("Cargo.toml") 375 | } 376 | 377 | /// The path to the folder containing the `Cargo.toml` file for the current exercise. 378 | pub fn manifest_folder_path(&self, exercises_dir: &Path) -> PathBuf { 379 | exercises_dir.join(self.chapter()).join(self.exercise()) 380 | } 381 | 382 | /// The configuration for the current exercise, if any. 
383 | pub fn config(&self, exercises_dir: &Path) -> Result, anyhow::Error> { 384 | let exercise_config = self.manifest_folder_path(exercises_dir).join(".wr.toml"); 385 | if !exercise_config.exists() { 386 | return Ok(None); 387 | } 388 | let exercise_config = fs_err::read_to_string(&exercise_config).context(format!( 389 | "Failed to read the configuration for the exercise `{}`", 390 | self.exercise() 391 | ))?; 392 | let exercise_config: ExerciseConfig = 393 | toml::from_str(&exercise_config).with_context(|| { 394 | format!( 395 | "Failed to parse the configuration for the exercise `{}`", 396 | self.exercise() 397 | ) 398 | })?; 399 | Ok(Some(exercise_config)) 400 | } 401 | 402 | /// The number+name of the chapter that contains this exercise. 403 | pub fn chapter(&self) -> String { 404 | format!("{:02}_{}", self.chapter_number, self.chapter_name) 405 | } 406 | 407 | /// The number+name of this exercise. 408 | pub fn exercise(&self) -> String { 409 | format!("{:02}_{}", self.number, self.name) 410 | } 411 | 412 | /// The number of this exercise. 413 | pub fn exercise_number(&self) -> u16 { 414 | self.number 415 | } 416 | 417 | /// The number of the chapter that contains this exercise. 418 | pub fn chapter_number(&self) -> u16 { 419 | self.chapter_number 420 | } 421 | 422 | /// Verify that the exercise exists. 423 | /// It may have been removed from the repository after an update to the current course. 
424 | pub fn exists(&self, exercises_dir: &Path) -> bool { 425 | self.manifest_path(exercises_dir).exists() 426 | } 427 | } 428 | 429 | impl std::fmt::Display for ExerciseDefinition { 430 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 431 | write!( 432 | f, 433 | "({:02}) {} - ({:02}) {}", 434 | self.chapter_number, self.chapter_name, self.number, self.name 435 | ) 436 | } 437 | } 438 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 4 4 | 5 | [[package]] 6 | name = "ahash" 7 | version = "0.8.11" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" 10 | dependencies = [ 11 | "cfg-if", 12 | "once_cell", 13 | "version_check", 14 | "zerocopy", 15 | ] 16 | 17 | [[package]] 18 | name = "aho-corasick" 19 | version = "1.1.3" 20 | source = "registry+https://github.com/rust-lang/crates.io-index" 21 | checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" 22 | dependencies = [ 23 | "memchr", 24 | ] 25 | 26 | [[package]] 27 | name = "allocator-api2" 28 | version = "0.2.21" 29 | source = "registry+https://github.com/rust-lang/crates.io-index" 30 | checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" 31 | 32 | [[package]] 33 | name = "anstream" 34 | version = "0.6.18" 35 | source = "registry+https://github.com/rust-lang/crates.io-index" 36 | checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" 37 | dependencies = [ 38 | "anstyle", 39 | "anstyle-parse", 40 | "anstyle-query", 41 | "anstyle-wincon", 42 | "colorchoice", 43 | "is_terminal_polyfill", 44 | "utf8parse", 45 | ] 46 | 47 | [[package]] 48 | name = "anstyle" 49 | version = "1.0.10" 50 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 51 | checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" 52 | 53 | [[package]] 54 | name = "anstyle-parse" 55 | version = "0.2.6" 56 | source = "registry+https://github.com/rust-lang/crates.io-index" 57 | checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" 58 | dependencies = [ 59 | "utf8parse", 60 | ] 61 | 62 | [[package]] 63 | name = "anstyle-query" 64 | version = "1.1.2" 65 | source = "registry+https://github.com/rust-lang/crates.io-index" 66 | checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" 67 | dependencies = [ 68 | "windows-sys", 69 | ] 70 | 71 | [[package]] 72 | name = "anstyle-wincon" 73 | version = "3.0.7" 74 | source = "registry+https://github.com/rust-lang/crates.io-index" 75 | checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" 76 | dependencies = [ 77 | "anstyle", 78 | "once_cell", 79 | "windows-sys", 80 | ] 81 | 82 | [[package]] 83 | name = "anyhow" 84 | version = "1.0.96" 85 | source = "registry+https://github.com/rust-lang/crates.io-index" 86 | checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" 87 | 88 | [[package]] 89 | name = "autocfg" 90 | version = "1.4.0" 91 | source = "registry+https://github.com/rust-lang/crates.io-index" 92 | checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" 93 | 94 | [[package]] 95 | name = "bitflags" 96 | version = "2.8.0" 97 | source = "registry+https://github.com/rust-lang/crates.io-index" 98 | checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" 99 | 100 | [[package]] 101 | name = "cc" 102 | version = "1.2.15" 103 | source = "registry+https://github.com/rust-lang/crates.io-index" 104 | checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" 105 | dependencies = [ 106 | "shlex", 107 | ] 108 | 109 | [[package]] 110 | name = "cfg-if" 111 | version = "1.0.0" 112 
| source = "registry+https://github.com/rust-lang/crates.io-index" 113 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 114 | 115 | [[package]] 116 | name = "clap" 117 | version = "4.5.30" 118 | source = "registry+https://github.com/rust-lang/crates.io-index" 119 | checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d" 120 | dependencies = [ 121 | "clap_builder", 122 | "clap_derive", 123 | ] 124 | 125 | [[package]] 126 | name = "clap_builder" 127 | version = "4.5.30" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c" 130 | dependencies = [ 131 | "anstream", 132 | "anstyle", 133 | "clap_lex", 134 | "strsim", 135 | ] 136 | 137 | [[package]] 138 | name = "clap_derive" 139 | version = "4.5.28" 140 | source = "registry+https://github.com/rust-lang/crates.io-index" 141 | checksum = "bf4ced95c6f4a675af3da73304b9ac4ed991640c36374e4b46795c49e17cf1ed" 142 | dependencies = [ 143 | "heck", 144 | "proc-macro2", 145 | "quote", 146 | "syn", 147 | ] 148 | 149 | [[package]] 150 | name = "clap_lex" 151 | version = "0.7.4" 152 | source = "registry+https://github.com/rust-lang/crates.io-index" 153 | checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" 154 | 155 | [[package]] 156 | name = "colorchoice" 157 | version = "1.0.3" 158 | source = "registry+https://github.com/rust-lang/crates.io-index" 159 | checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" 160 | 161 | [[package]] 162 | name = "equivalent" 163 | version = "1.0.2" 164 | source = "registry+https://github.com/rust-lang/crates.io-index" 165 | checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" 166 | 167 | [[package]] 168 | name = "fallible-iterator" 169 | version = "0.2.0" 170 | source = "registry+https://github.com/rust-lang/crates.io-index" 171 | checksum = 
"4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" 172 | 173 | [[package]] 174 | name = "fallible-streaming-iterator" 175 | version = "0.1.9" 176 | source = "registry+https://github.com/rust-lang/crates.io-index" 177 | checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" 178 | 179 | [[package]] 180 | name = "fs-err" 181 | version = "2.11.0" 182 | source = "registry+https://github.com/rust-lang/crates.io-index" 183 | checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" 184 | dependencies = [ 185 | "autocfg", 186 | ] 187 | 188 | [[package]] 189 | name = "hashbrown" 190 | version = "0.14.5" 191 | source = "registry+https://github.com/rust-lang/crates.io-index" 192 | checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" 193 | dependencies = [ 194 | "ahash", 195 | "allocator-api2", 196 | ] 197 | 198 | [[package]] 199 | name = "hashbrown" 200 | version = "0.15.2" 201 | source = "registry+https://github.com/rust-lang/crates.io-index" 202 | checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" 203 | 204 | [[package]] 205 | name = "hashlink" 206 | version = "0.8.4" 207 | source = "registry+https://github.com/rust-lang/crates.io-index" 208 | checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" 209 | dependencies = [ 210 | "hashbrown 0.14.5", 211 | ] 212 | 213 | [[package]] 214 | name = "heck" 215 | version = "0.5.0" 216 | source = "registry+https://github.com/rust-lang/crates.io-index" 217 | checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" 218 | 219 | [[package]] 220 | name = "indexmap" 221 | version = "2.7.1" 222 | source = "registry+https://github.com/rust-lang/crates.io-index" 223 | checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" 224 | dependencies = [ 225 | "equivalent", 226 | "hashbrown 0.15.2", 227 | ] 228 | 229 | [[package]] 230 | name = "is_terminal_polyfill" 231 | version 
= "1.70.1" 232 | source = "registry+https://github.com/rust-lang/crates.io-index" 233 | checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" 234 | 235 | [[package]] 236 | name = "libsqlite3-sys" 237 | version = "0.26.0" 238 | source = "registry+https://github.com/rust-lang/crates.io-index" 239 | checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" 240 | dependencies = [ 241 | "cc", 242 | "pkg-config", 243 | "vcpkg", 244 | ] 245 | 246 | [[package]] 247 | name = "memchr" 248 | version = "2.7.4" 249 | source = "registry+https://github.com/rust-lang/crates.io-index" 250 | checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" 251 | 252 | [[package]] 253 | name = "once_cell" 254 | version = "1.20.3" 255 | source = "registry+https://github.com/rust-lang/crates.io-index" 256 | checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" 257 | 258 | [[package]] 259 | name = "pkg-config" 260 | version = "0.3.31" 261 | source = "registry+https://github.com/rust-lang/crates.io-index" 262 | checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" 263 | 264 | [[package]] 265 | name = "proc-macro2" 266 | version = "1.0.93" 267 | source = "registry+https://github.com/rust-lang/crates.io-index" 268 | checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" 269 | dependencies = [ 270 | "unicode-ident", 271 | ] 272 | 273 | [[package]] 274 | name = "quote" 275 | version = "1.0.38" 276 | source = "registry+https://github.com/rust-lang/crates.io-index" 277 | checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" 278 | dependencies = [ 279 | "proc-macro2", 280 | ] 281 | 282 | [[package]] 283 | name = "read_input" 284 | version = "0.8.6" 285 | source = "registry+https://github.com/rust-lang/crates.io-index" 286 | checksum = "2f178674da3d005db760b30d6735a989d692da37b86337daec6f2e311223d608" 287 | 288 | [[package]] 289 | name = "regex" 
290 | version = "1.11.1" 291 | source = "registry+https://github.com/rust-lang/crates.io-index" 292 | checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" 293 | dependencies = [ 294 | "aho-corasick", 295 | "memchr", 296 | "regex-automata", 297 | "regex-syntax", 298 | ] 299 | 300 | [[package]] 301 | name = "regex-automata" 302 | version = "0.4.9" 303 | source = "registry+https://github.com/rust-lang/crates.io-index" 304 | checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" 305 | dependencies = [ 306 | "aho-corasick", 307 | "memchr", 308 | "regex-syntax", 309 | ] 310 | 311 | [[package]] 312 | name = "regex-syntax" 313 | version = "0.8.5" 314 | source = "registry+https://github.com/rust-lang/crates.io-index" 315 | checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" 316 | 317 | [[package]] 318 | name = "rusqlite" 319 | version = "0.29.0" 320 | source = "registry+https://github.com/rust-lang/crates.io-index" 321 | checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2" 322 | dependencies = [ 323 | "bitflags", 324 | "fallible-iterator", 325 | "fallible-streaming-iterator", 326 | "hashlink", 327 | "libsqlite3-sys", 328 | "smallvec", 329 | ] 330 | 331 | [[package]] 332 | name = "serde" 333 | version = "1.0.218" 334 | source = "registry+https://github.com/rust-lang/crates.io-index" 335 | checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" 336 | dependencies = [ 337 | "serde_derive", 338 | ] 339 | 340 | [[package]] 341 | name = "serde_derive" 342 | version = "1.0.218" 343 | source = "registry+https://github.com/rust-lang/crates.io-index" 344 | checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" 345 | dependencies = [ 346 | "proc-macro2", 347 | "quote", 348 | "syn", 349 | ] 350 | 351 | [[package]] 352 | name = "serde_spanned" 353 | version = "0.6.8" 354 | source = "registry+https://github.com/rust-lang/crates.io-index" 355 | 
checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" 356 | dependencies = [ 357 | "serde", 358 | ] 359 | 360 | [[package]] 361 | name = "shlex" 362 | version = "1.3.0" 363 | source = "registry+https://github.com/rust-lang/crates.io-index" 364 | checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" 365 | 366 | [[package]] 367 | name = "smallvec" 368 | version = "1.14.0" 369 | source = "registry+https://github.com/rust-lang/crates.io-index" 370 | checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" 371 | 372 | [[package]] 373 | name = "smawk" 374 | version = "0.3.2" 375 | source = "registry+https://github.com/rust-lang/crates.io-index" 376 | checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" 377 | 378 | [[package]] 379 | name = "strsim" 380 | version = "0.11.1" 381 | source = "registry+https://github.com/rust-lang/crates.io-index" 382 | checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" 383 | 384 | [[package]] 385 | name = "syn" 386 | version = "2.0.98" 387 | source = "registry+https://github.com/rust-lang/crates.io-index" 388 | checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" 389 | dependencies = [ 390 | "proc-macro2", 391 | "quote", 392 | "unicode-ident", 393 | ] 394 | 395 | [[package]] 396 | name = "textwrap" 397 | version = "0.16.1" 398 | source = "registry+https://github.com/rust-lang/crates.io-index" 399 | checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" 400 | dependencies = [ 401 | "smawk", 402 | "unicode-linebreak", 403 | "unicode-width", 404 | ] 405 | 406 | [[package]] 407 | name = "toml" 408 | version = "0.7.8" 409 | source = "registry+https://github.com/rust-lang/crates.io-index" 410 | checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" 411 | dependencies = [ 412 | "serde", 413 | "serde_spanned", 414 | "toml_datetime", 415 | "toml_edit", 416 | ] 
417 | 418 | [[package]] 419 | name = "toml_datetime" 420 | version = "0.6.8" 421 | source = "registry+https://github.com/rust-lang/crates.io-index" 422 | checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" 423 | dependencies = [ 424 | "serde", 425 | ] 426 | 427 | [[package]] 428 | name = "toml_edit" 429 | version = "0.19.15" 430 | source = "registry+https://github.com/rust-lang/crates.io-index" 431 | checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" 432 | dependencies = [ 433 | "indexmap", 434 | "serde", 435 | "serde_spanned", 436 | "toml_datetime", 437 | "winnow", 438 | ] 439 | 440 | [[package]] 441 | name = "unicode-ident" 442 | version = "1.0.17" 443 | source = "registry+https://github.com/rust-lang/crates.io-index" 444 | checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe" 445 | 446 | [[package]] 447 | name = "unicode-linebreak" 448 | version = "0.1.5" 449 | source = "registry+https://github.com/rust-lang/crates.io-index" 450 | checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" 451 | 452 | [[package]] 453 | name = "unicode-width" 454 | version = "0.1.14" 455 | source = "registry+https://github.com/rust-lang/crates.io-index" 456 | checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" 457 | 458 | [[package]] 459 | name = "utf8parse" 460 | version = "0.2.2" 461 | source = "registry+https://github.com/rust-lang/crates.io-index" 462 | checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" 463 | 464 | [[package]] 465 | name = "vcpkg" 466 | version = "0.2.15" 467 | source = "registry+https://github.com/rust-lang/crates.io-index" 468 | checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" 469 | 470 | [[package]] 471 | name = "version_check" 472 | version = "0.9.5" 473 | source = "registry+https://github.com/rust-lang/crates.io-index" 474 | checksum = 
"0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" 475 | 476 | [[package]] 477 | name = "windows-sys" 478 | version = "0.59.0" 479 | source = "registry+https://github.com/rust-lang/crates.io-index" 480 | checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" 481 | dependencies = [ 482 | "windows-targets", 483 | ] 484 | 485 | [[package]] 486 | name = "windows-targets" 487 | version = "0.52.6" 488 | source = "registry+https://github.com/rust-lang/crates.io-index" 489 | checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" 490 | dependencies = [ 491 | "windows_aarch64_gnullvm", 492 | "windows_aarch64_msvc", 493 | "windows_i686_gnu", 494 | "windows_i686_gnullvm", 495 | "windows_i686_msvc", 496 | "windows_x86_64_gnu", 497 | "windows_x86_64_gnullvm", 498 | "windows_x86_64_msvc", 499 | ] 500 | 501 | [[package]] 502 | name = "windows_aarch64_gnullvm" 503 | version = "0.52.6" 504 | source = "registry+https://github.com/rust-lang/crates.io-index" 505 | checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" 506 | 507 | [[package]] 508 | name = "windows_aarch64_msvc" 509 | version = "0.52.6" 510 | source = "registry+https://github.com/rust-lang/crates.io-index" 511 | checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" 512 | 513 | [[package]] 514 | name = "windows_i686_gnu" 515 | version = "0.52.6" 516 | source = "registry+https://github.com/rust-lang/crates.io-index" 517 | checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" 518 | 519 | [[package]] 520 | name = "windows_i686_gnullvm" 521 | version = "0.52.6" 522 | source = "registry+https://github.com/rust-lang/crates.io-index" 523 | checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" 524 | 525 | [[package]] 526 | name = "windows_i686_msvc" 527 | version = "0.52.6" 528 | source = "registry+https://github.com/rust-lang/crates.io-index" 529 | checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" 530 | 531 | [[package]] 532 | name = "windows_x86_64_gnu" 533 | version = "0.52.6" 534 | source = "registry+https://github.com/rust-lang/crates.io-index" 535 | checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" 536 | 537 | [[package]] 538 | name = "windows_x86_64_gnullvm" 539 | version = "0.52.6" 540 | source = "registry+https://github.com/rust-lang/crates.io-index" 541 | checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" 542 | 543 | [[package]] 544 | name = "windows_x86_64_msvc" 545 | version = "0.52.6" 546 | source = "registry+https://github.com/rust-lang/crates.io-index" 547 | checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" 548 | 549 | [[package]] 550 | name = "winnow" 551 | version = "0.5.40" 552 | source = "registry+https://github.com/rust-lang/crates.io-index" 553 | checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" 554 | dependencies = [ 555 | "memchr", 556 | ] 557 | 558 | [[package]] 559 | name = "workshop-runner" 560 | version = "0.2.5" 561 | dependencies = [ 562 | "anyhow", 563 | "clap", 564 | "fs-err", 565 | "indexmap", 566 | "read_input", 567 | "regex", 568 | "rusqlite", 569 | "serde", 570 | "textwrap", 571 | "toml", 572 | "yansi", 573 | ] 574 | 575 | [[package]] 576 | name = "yansi" 577 | version = "0.5.1" 578 | source = "registry+https://github.com/rust-lang/crates.io-index" 579 | checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" 580 | 581 | [[package]] 582 | name = "zerocopy" 583 | version = "0.7.35" 584 | source = "registry+https://github.com/rust-lang/crates.io-index" 585 | checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" 586 | dependencies = [ 587 | "zerocopy-derive", 588 | ] 589 | 590 | [[package]] 591 | name = "zerocopy-derive" 592 | version = "0.7.35" 593 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 594 | checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" 595 | dependencies = [ 596 | "proc-macro2", 597 | "quote", 598 | "syn", 599 | ] 600 | --------------------------------------------------------------------------------