├── .github └── workflows │ ├── container.yml │ └── pages.yml ├── .gitignore ├── .vscode ├── launch.json └── tasks.json ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── LICENSE ├── README.md ├── assets ├── dark.js ├── feather-sprite.svg ├── search.js └── style.css ├── azure-pipelines.yml ├── docs ├── 01-installation.md ├── 02-useage.md ├── 03-configuration.md ├── 04-markdown-features.md ├── design-documents │ ├── 01-new-design.md │ ├── 02-toculate.md │ ├── 03-highlight.md │ ├── 04-layouts.md │ ├── 05-doctree.md │ └── README.md ├── footer.md ├── img │ └── example_asset.png └── index.md └── src ├── args.rs ├── asset.rs ├── docket.rs ├── doctree.rs ├── error.rs ├── highlight.rs ├── main.rs ├── render.rs ├── render ├── layout.rs └── layout │ └── html.rs ├── search.rs ├── toc.rs └── utils.rs /.github/workflows/container.yml: -------------------------------------------------------------------------------- 1 | name: Build Docker Container 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | tags: ["v*"] 7 | 8 | pull_request: 9 | branches: ["main"] 10 | 11 | workflow_dispatch: 12 | 13 | permissions: 14 | packages: write 15 | 16 | jobs: 17 | deploy: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Login to GitHub Container Registry 21 | uses: docker/login-action@v2 22 | with: 23 | registry: ghcr.io 24 | username: ${{ github.actor }} 25 | password: ${{ secrets.GITHUB_TOKEN }} 26 | 27 | - name: Build and Push Container 28 | uses: docker/build-push-action@v3 29 | with: 30 | push: true 31 | tags: | 32 | ghcr.io/${{ github.repository }}:latest 33 | -------------------------------------------------------------------------------- /.github/workflows/pages.yml: -------------------------------------------------------------------------------- 1 | name: Publish GitHub Pages 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | 7 | pull_request: 8 | branches: ["main"] 9 | 10 | workflow_dispatch: 11 | 12 | permissions: 13 | contents: read 14 | pages: write 15 | id-token: 
write 16 | 17 | # Allow one concurrent deployment 18 | concurrency: 19 | group: "pages" 20 | cancel-in-progress: true 21 | 22 | jobs: 23 | build: 24 | runs-on: ubuntu-latest 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v3 28 | - name: Setup Pages 29 | uses: actions/configure-pages@v2 30 | - name: Build Site 31 | run: cargo run --no-default-features --features=syntect-hl -- -s docs -t build 32 | - name: Upload artifact 33 | uses: actions/upload-pages-artifact@v1 34 | with: 35 | # Upload build output directory 36 | path: 'build' 37 | deploy: 38 | name: Deploy 39 | runs-on: ubuntu-latest 40 | needs: build 41 | environment: 42 | name: github-pages 43 | url: ${{ steps.deployment.outputs.page_url }} 44 | steps: 45 | - name: Setup Pages 46 | uses: actions/configure-pages@v2 47 | - name: Deploy GitHub Pages 48 | id: deployment 49 | uses: actions/deploy-pages@v1 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | **/*.rs.bk 3 | output/ 4 | build/ 5 | docs/build/ 6 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "lldb", 9 | "request": "launch", 10 | "name": "Debug executable 'docket'", 11 | "cargo": { 12 | "args": [ 13 | "build", 14 | "--bin=docket", 15 | "--package=docket" 16 | ], 17 | "filter": { 18 | "name": "docket", 19 | "kind": "bin" 20 | } 21 | }, 22 | "args": "-s docs/", 23 | "cwd": "${workspaceFolder}", 24 | }, 25 | { 26 | "type": "lldb", 27 | "request": "launch", 28 | "name": "Debug unit tests in executable 'docket'", 29 | "cargo": { 30 | "args": [ 31 | "test", 32 | "--no-run", 33 | "--bin=docket", 34 | "--package=docket" 35 | ], 36 | "filter": { 37 | "name": "docket", 38 | "kind": "bin" 39 | } 40 | }, 41 | "args": [], 42 | "cwd": "${workspaceFolder}" 43 | } 44 | ] 45 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "type": "cargo", 6 | "command": "build", 7 | "problemMatcher": [ 8 | "$rustc" 9 | ], 10 | "group": { 11 | "kind": "build", 12 | "isDefault": true 13 | } 14 | }, 15 | { 16 | "type": "cargo", 17 | "command": "test", 18 | "problemMatcher": [ 19 | "$rustc" 20 | ], 21 | "group": { 22 | "kind": "test", 23 | "isDefault": true 24 | } 25 | }, 26 | { 27 | "type": "cargo", 28 | "command": "fmt", 29 | "problemMatcher": [ 30 | "$rustc" 31 | ], 32 | } 33 | ] 34 | } -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | This page lists the main headline changes that make up each release of `docket`. 4 | It is intended as a useful reference to the development history of the project, 5 | but is by no means an exhaustive list of all changes. 
6 | 7 | ## 0.6.1 8 | 9 | * Improved tokenisation for the search index. Prevents some trivial tokens from 10 | ending up in the search index when they shouldn't have been. 11 | 12 | ## 0.6.0 13 | 14 | * Add javascript search. A json search index is built when the site is rendered. 15 | Javascript on the frontend uses progressive enhancement to inject a search 16 | box. 17 | * Documentation improvements. 18 | 19 | ## 0.5.0 20 | 21 | * Enable more of `pulldown-cmark`'s Markdown extensions. 22 | * Support for highlighting code using `syntect` _or_ `hl-js`. 23 | 24 | ## 0.4.1 25 | 26 | * Added a feature `watch` which uses file system notifications to watch for 27 | changes in the source folder and re-build the site when it is changed. This 28 | allows users to opt-out of the `notify` crate dependency if the feature isn't 29 | needed. 30 | * Added a feature `par_render` which uses Rayon to render pages in parallel. 31 | This allows users to opt-out of the Rayon dependency if the feature isn't 32 | needed. 33 | 34 | ## 0.3.0 35 | 36 | * Upgraded to Pulldown-cmark `0.4`. This version is a major change to the way 37 | Pulldown parses markdown and should provide some performance improvements on 38 | larger files. 39 | * Docket now has a working CI configuration! 40 | 41 | ## 0.2.5 42 | 43 | * Generated HTML has a `charset` meta tag. This should fix rendering 44 | of non-ASCII characters. 45 | 46 | ## 0.2.4 47 | 48 | * Parallel rendering of pages with Rayon. 49 | 50 | ## 0.2.3 51 | 52 | * Rust 2018 support. 53 | 54 | ## 0.2.1 55 | 56 | * Fixup to generated TOC to prevent an unnecessary redirect. Previously links 57 | were missing a trailing `/`. This caused a re-direct to the directory with 58 | the trailing `/` before the page is rendered. 59 | 60 | ## 0.2.0 61 | 62 | * File watcher support with the `-w`, and `--watch` flags (#2) 63 | 64 | ## 0.1.7 65 | 66 | * Sort pages by name. The prefix is dropped. 
This allows adding `01-` and 67 | similar numeric prefixes to pages to change their order within the generated 68 | site. 69 | 70 | ## 0.1.5 71 | 72 | * Add a `--version` flag to print out the version of `docket`. 73 | * Improved rendering of code blocks 74 | * Stop rendering the style inline in each page. Reduces page bloat. 75 | 76 | ## 0.1.4 77 | 78 | * Support mobile browsers with the `viewport` `meta` tag. 79 | 80 | ## 0.1.3 81 | 82 | * Initial "usable" version. -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 3 4 | 5 | [[package]] 6 | name = "adler" 7 | version = "1.0.2" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" 10 | 11 | [[package]] 12 | name = "aho-corasick" 13 | version = "0.7.18" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" 16 | dependencies = [ 17 | "memchr", 18 | ] 19 | 20 | [[package]] 21 | name = "atty" 22 | version = "0.2.14" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" 25 | dependencies = [ 26 | "hermit-abi", 27 | "libc", 28 | "winapi 0.3.9", 29 | ] 30 | 31 | [[package]] 32 | name = "autocfg" 33 | version = "1.1.0" 34 | source = "registry+https://github.com/rust-lang/crates.io-index" 35 | checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" 36 | 37 | [[package]] 38 | name = "base64" 39 | version = "0.13.0" 40 | source = "registry+https://github.com/rust-lang/crates.io-index" 41 | checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" 42 | 43 | 
[[package]] 44 | name = "bincode" 45 | version = "1.3.3" 46 | source = "registry+https://github.com/rust-lang/crates.io-index" 47 | checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" 48 | dependencies = [ 49 | "serde", 50 | ] 51 | 52 | [[package]] 53 | name = "bitflags" 54 | version = "1.3.2" 55 | source = "registry+https://github.com/rust-lang/crates.io-index" 56 | checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 57 | 58 | [[package]] 59 | name = "cc" 60 | version = "1.0.73" 61 | source = "registry+https://github.com/rust-lang/crates.io-index" 62 | checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" 63 | 64 | [[package]] 65 | name = "cfg-if" 66 | version = "0.1.10" 67 | source = "registry+https://github.com/rust-lang/crates.io-index" 68 | checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" 69 | 70 | [[package]] 71 | name = "cfg-if" 72 | version = "1.0.0" 73 | source = "registry+https://github.com/rust-lang/crates.io-index" 74 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 75 | 76 | [[package]] 77 | name = "crc32fast" 78 | version = "1.3.2" 79 | source = "registry+https://github.com/rust-lang/crates.io-index" 80 | checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" 81 | dependencies = [ 82 | "cfg-if 1.0.0", 83 | ] 84 | 85 | [[package]] 86 | name = "docket" 87 | version = "0.7.0" 88 | dependencies = [ 89 | "docopt", 90 | "env_logger", 91 | "log", 92 | "notify", 93 | "once_cell", 94 | "pulldown-cmark", 95 | "serde", 96 | "serde_json", 97 | "syntect", 98 | ] 99 | 100 | [[package]] 101 | name = "docopt" 102 | version = "1.1.1" 103 | source = "registry+https://github.com/rust-lang/crates.io-index" 104 | checksum = "7f3f119846c823f9eafcf953a8f6ffb6ed69bf6240883261a7f13b634579a51f" 105 | dependencies = [ 106 | "lazy_static", 107 | "regex", 108 | "serde", 109 | "strsim", 110 | ] 111 | 112 | [[package]] 113 | 
name = "env_logger" 114 | version = "0.9.0" 115 | source = "registry+https://github.com/rust-lang/crates.io-index" 116 | checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" 117 | dependencies = [ 118 | "atty", 119 | "humantime", 120 | "log", 121 | "regex", 122 | "termcolor", 123 | ] 124 | 125 | [[package]] 126 | name = "filetime" 127 | version = "0.2.17" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" 130 | dependencies = [ 131 | "cfg-if 1.0.0", 132 | "libc", 133 | "redox_syscall", 134 | "windows-sys", 135 | ] 136 | 137 | [[package]] 138 | name = "flate2" 139 | version = "1.0.24" 140 | source = "registry+https://github.com/rust-lang/crates.io-index" 141 | checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" 142 | dependencies = [ 143 | "crc32fast", 144 | "miniz_oxide", 145 | ] 146 | 147 | [[package]] 148 | name = "fnv" 149 | version = "1.0.7" 150 | source = "registry+https://github.com/rust-lang/crates.io-index" 151 | checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 152 | 153 | [[package]] 154 | name = "fsevent" 155 | version = "0.4.0" 156 | source = "registry+https://github.com/rust-lang/crates.io-index" 157 | checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" 158 | dependencies = [ 159 | "bitflags", 160 | "fsevent-sys", 161 | ] 162 | 163 | [[package]] 164 | name = "fsevent-sys" 165 | version = "2.0.1" 166 | source = "registry+https://github.com/rust-lang/crates.io-index" 167 | checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" 168 | dependencies = [ 169 | "libc", 170 | ] 171 | 172 | [[package]] 173 | name = "fuchsia-zircon" 174 | version = "0.3.3" 175 | source = "registry+https://github.com/rust-lang/crates.io-index" 176 | checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" 177 | dependencies = [ 178 
| "bitflags", 179 | "fuchsia-zircon-sys", 180 | ] 181 | 182 | [[package]] 183 | name = "fuchsia-zircon-sys" 184 | version = "0.3.3" 185 | source = "registry+https://github.com/rust-lang/crates.io-index" 186 | checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" 187 | 188 | [[package]] 189 | name = "hashbrown" 190 | version = "0.12.3" 191 | source = "registry+https://github.com/rust-lang/crates.io-index" 192 | checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" 193 | 194 | [[package]] 195 | name = "hermit-abi" 196 | version = "0.1.19" 197 | source = "registry+https://github.com/rust-lang/crates.io-index" 198 | checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" 199 | dependencies = [ 200 | "libc", 201 | ] 202 | 203 | [[package]] 204 | name = "humantime" 205 | version = "2.1.0" 206 | source = "registry+https://github.com/rust-lang/crates.io-index" 207 | checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" 208 | 209 | [[package]] 210 | name = "indexmap" 211 | version = "1.9.1" 212 | source = "registry+https://github.com/rust-lang/crates.io-index" 213 | checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" 214 | dependencies = [ 215 | "autocfg", 216 | "hashbrown", 217 | ] 218 | 219 | [[package]] 220 | name = "inotify" 221 | version = "0.7.1" 222 | source = "registry+https://github.com/rust-lang/crates.io-index" 223 | checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f" 224 | dependencies = [ 225 | "bitflags", 226 | "inotify-sys", 227 | "libc", 228 | ] 229 | 230 | [[package]] 231 | name = "inotify-sys" 232 | version = "0.1.5" 233 | source = "registry+https://github.com/rust-lang/crates.io-index" 234 | checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" 235 | dependencies = [ 236 | "libc", 237 | ] 238 | 239 | [[package]] 240 | name = "iovec" 241 | version = "0.1.4" 242 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 243 | checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" 244 | dependencies = [ 245 | "libc", 246 | ] 247 | 248 | [[package]] 249 | name = "itoa" 250 | version = "1.0.2" 251 | source = "registry+https://github.com/rust-lang/crates.io-index" 252 | checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" 253 | 254 | [[package]] 255 | name = "kernel32-sys" 256 | version = "0.2.2" 257 | source = "registry+https://github.com/rust-lang/crates.io-index" 258 | checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" 259 | dependencies = [ 260 | "winapi 0.2.8", 261 | "winapi-build", 262 | ] 263 | 264 | [[package]] 265 | name = "lazy_static" 266 | version = "1.4.0" 267 | source = "registry+https://github.com/rust-lang/crates.io-index" 268 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 269 | 270 | [[package]] 271 | name = "lazycell" 272 | version = "1.3.0" 273 | source = "registry+https://github.com/rust-lang/crates.io-index" 274 | checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" 275 | 276 | [[package]] 277 | name = "libc" 278 | version = "0.2.126" 279 | source = "registry+https://github.com/rust-lang/crates.io-index" 280 | checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" 281 | 282 | [[package]] 283 | name = "line-wrap" 284 | version = "0.1.1" 285 | source = "registry+https://github.com/rust-lang/crates.io-index" 286 | checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" 287 | dependencies = [ 288 | "safemem", 289 | ] 290 | 291 | [[package]] 292 | name = "linked-hash-map" 293 | version = "0.5.6" 294 | source = "registry+https://github.com/rust-lang/crates.io-index" 295 | checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" 296 | 297 | [[package]] 298 | name = "log" 299 | version = "0.4.17" 300 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 301 | checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" 302 | dependencies = [ 303 | "cfg-if 1.0.0", 304 | ] 305 | 306 | [[package]] 307 | name = "memchr" 308 | version = "2.5.0" 309 | source = "registry+https://github.com/rust-lang/crates.io-index" 310 | checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" 311 | 312 | [[package]] 313 | name = "miniz_oxide" 314 | version = "0.5.3" 315 | source = "registry+https://github.com/rust-lang/crates.io-index" 316 | checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" 317 | dependencies = [ 318 | "adler", 319 | ] 320 | 321 | [[package]] 322 | name = "mio" 323 | version = "0.6.23" 324 | source = "registry+https://github.com/rust-lang/crates.io-index" 325 | checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" 326 | dependencies = [ 327 | "cfg-if 0.1.10", 328 | "fuchsia-zircon", 329 | "fuchsia-zircon-sys", 330 | "iovec", 331 | "kernel32-sys", 332 | "libc", 333 | "log", 334 | "miow", 335 | "net2", 336 | "slab", 337 | "winapi 0.2.8", 338 | ] 339 | 340 | [[package]] 341 | name = "mio-extras" 342 | version = "2.0.6" 343 | source = "registry+https://github.com/rust-lang/crates.io-index" 344 | checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" 345 | dependencies = [ 346 | "lazycell", 347 | "log", 348 | "mio", 349 | "slab", 350 | ] 351 | 352 | [[package]] 353 | name = "miow" 354 | version = "0.2.2" 355 | source = "registry+https://github.com/rust-lang/crates.io-index" 356 | checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" 357 | dependencies = [ 358 | "kernel32-sys", 359 | "net2", 360 | "winapi 0.2.8", 361 | "ws2_32-sys", 362 | ] 363 | 364 | [[package]] 365 | name = "net2" 366 | version = "0.2.37" 367 | source = "registry+https://github.com/rust-lang/crates.io-index" 368 | checksum = 
"391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae" 369 | dependencies = [ 370 | "cfg-if 0.1.10", 371 | "libc", 372 | "winapi 0.3.9", 373 | ] 374 | 375 | [[package]] 376 | name = "notify" 377 | version = "4.0.17" 378 | source = "registry+https://github.com/rust-lang/crates.io-index" 379 | checksum = "ae03c8c853dba7bfd23e571ff0cff7bc9dceb40a4cd684cd1681824183f45257" 380 | dependencies = [ 381 | "bitflags", 382 | "filetime", 383 | "fsevent", 384 | "fsevent-sys", 385 | "inotify", 386 | "libc", 387 | "mio", 388 | "mio-extras", 389 | "walkdir", 390 | "winapi 0.3.9", 391 | ] 392 | 393 | [[package]] 394 | name = "num_threads" 395 | version = "0.1.6" 396 | source = "registry+https://github.com/rust-lang/crates.io-index" 397 | checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" 398 | dependencies = [ 399 | "libc", 400 | ] 401 | 402 | [[package]] 403 | name = "once_cell" 404 | version = "1.13.0" 405 | source = "registry+https://github.com/rust-lang/crates.io-index" 406 | checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" 407 | 408 | [[package]] 409 | name = "onig" 410 | version = "6.3.2" 411 | source = "registry+https://github.com/rust-lang/crates.io-index" 412 | checksum = "1eb3502504c9c8b06634b38bfdda86a9a8cef6277f3dec4d8b17c115110dd2a3" 413 | dependencies = [ 414 | "bitflags", 415 | "lazy_static", 416 | "libc", 417 | "onig_sys", 418 | ] 419 | 420 | [[package]] 421 | name = "onig_sys" 422 | version = "69.8.0" 423 | source = "registry+https://github.com/rust-lang/crates.io-index" 424 | checksum = "8bf3fbc9b931b6c9af85d219c7943c274a6ad26cff7488a2210215edd5f49bf8" 425 | dependencies = [ 426 | "cc", 427 | "pkg-config", 428 | ] 429 | 430 | [[package]] 431 | name = "pkg-config" 432 | version = "0.3.25" 433 | source = "registry+https://github.com/rust-lang/crates.io-index" 434 | checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" 435 | 436 | [[package]] 437 | name = "plist" 438 | version 
= "1.3.1" 439 | source = "registry+https://github.com/rust-lang/crates.io-index" 440 | checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225" 441 | dependencies = [ 442 | "base64", 443 | "indexmap", 444 | "line-wrap", 445 | "serde", 446 | "time", 447 | "xml-rs", 448 | ] 449 | 450 | [[package]] 451 | name = "proc-macro2" 452 | version = "1.0.40" 453 | source = "registry+https://github.com/rust-lang/crates.io-index" 454 | checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" 455 | dependencies = [ 456 | "unicode-ident", 457 | ] 458 | 459 | [[package]] 460 | name = "pulldown-cmark" 461 | version = "0.9.1" 462 | source = "registry+https://github.com/rust-lang/crates.io-index" 463 | checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6" 464 | dependencies = [ 465 | "bitflags", 466 | "memchr", 467 | "unicase", 468 | ] 469 | 470 | [[package]] 471 | name = "quote" 472 | version = "1.0.20" 473 | source = "registry+https://github.com/rust-lang/crates.io-index" 474 | checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" 475 | dependencies = [ 476 | "proc-macro2", 477 | ] 478 | 479 | [[package]] 480 | name = "redox_syscall" 481 | version = "0.2.13" 482 | source = "registry+https://github.com/rust-lang/crates.io-index" 483 | checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" 484 | dependencies = [ 485 | "bitflags", 486 | ] 487 | 488 | [[package]] 489 | name = "regex" 490 | version = "1.6.0" 491 | source = "registry+https://github.com/rust-lang/crates.io-index" 492 | checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" 493 | dependencies = [ 494 | "aho-corasick", 495 | "memchr", 496 | "regex-syntax", 497 | ] 498 | 499 | [[package]] 500 | name = "regex-syntax" 501 | version = "0.6.27" 502 | source = "registry+https://github.com/rust-lang/crates.io-index" 503 | checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" 
504 | 505 | [[package]] 506 | name = "ryu" 507 | version = "1.0.10" 508 | source = "registry+https://github.com/rust-lang/crates.io-index" 509 | checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" 510 | 511 | [[package]] 512 | name = "safemem" 513 | version = "0.3.3" 514 | source = "registry+https://github.com/rust-lang/crates.io-index" 515 | checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" 516 | 517 | [[package]] 518 | name = "same-file" 519 | version = "1.0.6" 520 | source = "registry+https://github.com/rust-lang/crates.io-index" 521 | checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" 522 | dependencies = [ 523 | "winapi-util", 524 | ] 525 | 526 | [[package]] 527 | name = "serde" 528 | version = "1.0.139" 529 | source = "registry+https://github.com/rust-lang/crates.io-index" 530 | checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6" 531 | dependencies = [ 532 | "serde_derive", 533 | ] 534 | 535 | [[package]] 536 | name = "serde_derive" 537 | version = "1.0.139" 538 | source = "registry+https://github.com/rust-lang/crates.io-index" 539 | checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb" 540 | dependencies = [ 541 | "proc-macro2", 542 | "quote", 543 | "syn", 544 | ] 545 | 546 | [[package]] 547 | name = "serde_json" 548 | version = "1.0.82" 549 | source = "registry+https://github.com/rust-lang/crates.io-index" 550 | checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" 551 | dependencies = [ 552 | "itoa", 553 | "ryu", 554 | "serde", 555 | ] 556 | 557 | [[package]] 558 | name = "slab" 559 | version = "0.4.7" 560 | source = "registry+https://github.com/rust-lang/crates.io-index" 561 | checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" 562 | dependencies = [ 563 | "autocfg", 564 | ] 565 | 566 | [[package]] 567 | name = "strsim" 568 | version = "0.10.0" 569 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 570 | checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" 571 | 572 | [[package]] 573 | name = "syn" 574 | version = "1.0.98" 575 | source = "registry+https://github.com/rust-lang/crates.io-index" 576 | checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" 577 | dependencies = [ 578 | "proc-macro2", 579 | "quote", 580 | "unicode-ident", 581 | ] 582 | 583 | [[package]] 584 | name = "syntect" 585 | version = "5.0.0" 586 | source = "registry+https://github.com/rust-lang/crates.io-index" 587 | checksum = "c6c454c27d9d7d9a84c7803aaa3c50cd088d2906fe3c6e42da3209aa623576a8" 588 | dependencies = [ 589 | "bincode", 590 | "bitflags", 591 | "flate2", 592 | "fnv", 593 | "lazy_static", 594 | "once_cell", 595 | "onig", 596 | "plist", 597 | "regex-syntax", 598 | "serde", 599 | "serde_derive", 600 | "serde_json", 601 | "thiserror", 602 | "walkdir", 603 | "yaml-rust", 604 | ] 605 | 606 | [[package]] 607 | name = "termcolor" 608 | version = "1.1.3" 609 | source = "registry+https://github.com/rust-lang/crates.io-index" 610 | checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" 611 | dependencies = [ 612 | "winapi-util", 613 | ] 614 | 615 | [[package]] 616 | name = "thiserror" 617 | version = "1.0.31" 618 | source = "registry+https://github.com/rust-lang/crates.io-index" 619 | checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" 620 | dependencies = [ 621 | "thiserror-impl", 622 | ] 623 | 624 | [[package]] 625 | name = "thiserror-impl" 626 | version = "1.0.31" 627 | source = "registry+https://github.com/rust-lang/crates.io-index" 628 | checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" 629 | dependencies = [ 630 | "proc-macro2", 631 | "quote", 632 | "syn", 633 | ] 634 | 635 | [[package]] 636 | name = "time" 637 | version = "0.3.11" 638 | source = "registry+https://github.com/rust-lang/crates.io-index" 639 
| checksum = "72c91f41dcb2f096c05f0873d667dceec1087ce5bcf984ec8ffb19acddbb3217" 640 | dependencies = [ 641 | "itoa", 642 | "libc", 643 | "num_threads", 644 | ] 645 | 646 | [[package]] 647 | name = "unicase" 648 | version = "2.6.0" 649 | source = "registry+https://github.com/rust-lang/crates.io-index" 650 | checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" 651 | dependencies = [ 652 | "version_check", 653 | ] 654 | 655 | [[package]] 656 | name = "unicode-ident" 657 | version = "1.0.2" 658 | source = "registry+https://github.com/rust-lang/crates.io-index" 659 | checksum = "15c61ba63f9235225a22310255a29b806b907c9b8c964bcbd0a2c70f3f2deea7" 660 | 661 | [[package]] 662 | name = "version_check" 663 | version = "0.9.4" 664 | source = "registry+https://github.com/rust-lang/crates.io-index" 665 | checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" 666 | 667 | [[package]] 668 | name = "walkdir" 669 | version = "2.3.2" 670 | source = "registry+https://github.com/rust-lang/crates.io-index" 671 | checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" 672 | dependencies = [ 673 | "same-file", 674 | "winapi 0.3.9", 675 | "winapi-util", 676 | ] 677 | 678 | [[package]] 679 | name = "winapi" 680 | version = "0.2.8" 681 | source = "registry+https://github.com/rust-lang/crates.io-index" 682 | checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" 683 | 684 | [[package]] 685 | name = "winapi" 686 | version = "0.3.9" 687 | source = "registry+https://github.com/rust-lang/crates.io-index" 688 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 689 | dependencies = [ 690 | "winapi-i686-pc-windows-gnu", 691 | "winapi-x86_64-pc-windows-gnu", 692 | ] 693 | 694 | [[package]] 695 | name = "winapi-build" 696 | version = "0.1.1" 697 | source = "registry+https://github.com/rust-lang/crates.io-index" 698 | checksum = 
"2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" 699 | 700 | [[package]] 701 | name = "winapi-i686-pc-windows-gnu" 702 | version = "0.4.0" 703 | source = "registry+https://github.com/rust-lang/crates.io-index" 704 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 705 | 706 | [[package]] 707 | name = "winapi-util" 708 | version = "0.1.5" 709 | source = "registry+https://github.com/rust-lang/crates.io-index" 710 | checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" 711 | dependencies = [ 712 | "winapi 0.3.9", 713 | ] 714 | 715 | [[package]] 716 | name = "winapi-x86_64-pc-windows-gnu" 717 | version = "0.4.0" 718 | source = "registry+https://github.com/rust-lang/crates.io-index" 719 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 720 | 721 | [[package]] 722 | name = "windows-sys" 723 | version = "0.36.1" 724 | source = "registry+https://github.com/rust-lang/crates.io-index" 725 | checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" 726 | dependencies = [ 727 | "windows_aarch64_msvc", 728 | "windows_i686_gnu", 729 | "windows_i686_msvc", 730 | "windows_x86_64_gnu", 731 | "windows_x86_64_msvc", 732 | ] 733 | 734 | [[package]] 735 | name = "windows_aarch64_msvc" 736 | version = "0.36.1" 737 | source = "registry+https://github.com/rust-lang/crates.io-index" 738 | checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" 739 | 740 | [[package]] 741 | name = "windows_i686_gnu" 742 | version = "0.36.1" 743 | source = "registry+https://github.com/rust-lang/crates.io-index" 744 | checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" 745 | 746 | [[package]] 747 | name = "windows_i686_msvc" 748 | version = "0.36.1" 749 | source = "registry+https://github.com/rust-lang/crates.io-index" 750 | checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" 751 | 752 | [[package]] 753 | name = 
"windows_x86_64_gnu" 754 | version = "0.36.1" 755 | source = "registry+https://github.com/rust-lang/crates.io-index" 756 | checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" 757 | 758 | [[package]] 759 | name = "windows_x86_64_msvc" 760 | version = "0.36.1" 761 | source = "registry+https://github.com/rust-lang/crates.io-index" 762 | checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" 763 | 764 | [[package]] 765 | name = "ws2_32-sys" 766 | version = "0.2.1" 767 | source = "registry+https://github.com/rust-lang/crates.io-index" 768 | checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" 769 | dependencies = [ 770 | "winapi 0.2.8", 771 | "winapi-build", 772 | ] 773 | 774 | [[package]] 775 | name = "xml-rs" 776 | version = "0.8.4" 777 | source = "registry+https://github.com/rust-lang/crates.io-index" 778 | checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3" 779 | 780 | [[package]] 781 | name = "yaml-rust" 782 | version = "0.4.5" 783 | source = "registry+https://github.com/rust-lang/crates.io-index" 784 | checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" 785 | dependencies = [ 786 | "linked-hash-map", 787 | ] 788 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "docket" 3 | version = "0.7.1" 4 | description = "Simple markdown to HTML documentation rendering" 5 | readme = "README.md" 6 | license-file = "LICENSE" 7 | documentation = "https://docs.rs/crate/docket/latest" 8 | homepage = "https://iwillspeak.github.io/docket" 9 | repository = "https://github.com/iwillspeak/docket" 10 | authors = ["Will Speak "] 11 | edition = "2021" 12 | exclude = ["/docs/*", "/.vscode/*"] 13 | 14 | [dependencies] 15 | pulldown-cmark = { version= "0.9", default_features = false } 16 | docopt = "1.1" 17 | serde = { 
version = "1.0", features = ["derive"] } 18 | serde_json = "1.0" 19 | log = "0.4" 20 | env_logger = "0.9" 21 | syntect = { version = "5.0", optional = true } 22 | once_cell = { version = "1.13", optional = true } 23 | notify = { version = "4.0", optional = true } 24 | 25 | [features] 26 | default = [ "watch", "syntect-hl" ] 27 | 28 | # File system watching 29 | watch = [ "notify" ] 30 | 31 | # Syntax highlighting with syntect 32 | syntect-hl = [ "once_cell", "syntect" ] 33 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:latest as build 2 | WORKDIR /usr/src/docket 3 | COPY . . 4 | RUN cargo install --path . 5 | 6 | FROM debian:buster-slim as runtime 7 | # RUN apt-get update && apt-get install -y extra-runtime-dependencies && rm -rf /var/lib/apt/lists/* 8 | COPY --from=build /usr/local/cargo/bin/docket /usr/local/bin/docket 9 | CMD ["/usr/local/bin/docket"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity.
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Docket 2 | 3 | [![Build Status][build_badge_image]][build_info] [![Build Docker Container][docker_badge_image]][docker_info] 4 | 5 | Simple markdown to HTML documentation rendering. Docket aims to be a Rust clone of [`d`](https://github.com/sjl/d). 6 | 7 | ## Key Features 8 | 9 | * Binary which can be installed with `cargo install` 10 | * Command line argument parsing with [Docopt](https://docs.rs/docopt/0.8.1/docopt/) 11 | * Markdown rendering with `pulldown-cmark`. 12 | * Syntax highlighting with [Syntect](https://github.com/trishume/syntect/). 13 | * Javascript powered search. 14 | * Zero-configuration. 15 | 16 | ## Installation 17 | 18 | Docket can be installed with cargo via `cargo install docket`. Once installed you should be able to run it from the command line as `docket`. 19 | 20 | Docket has two Cargo features which are enabled by default.
You can disable them with `--no-default-features` when installing if you don't need them to save some time. 21 | 22 | * `watch` - Support for watching files and re-generating the output folder when changes are made. 23 | * `syntect-hl` - Support for syntax highlighting code blocks with the Syntect crate. 24 | 25 | ## Getting Started 26 | 27 | To begin creating your documentation create a new `docs/` folder at the root of your repository. Add a file called `index.md` with a short markdown description of the project. Add pages by creating new markdown files in the `docs/` folder. Each page should have a level-1 heading at the beginning which is treated as the title of the page. 28 | 29 | To render the HTML output change to the `docs/` folder and run `docket`. This should create a new `docs/build/` folder containing the rendered site; ready to be published to a web-server or served with GitHub Pages. For more information about setup and configuration [check out the docs](https://iwillspeak.github.io/docket/).
30 | 31 | [build_badge_image]: https://dev.azure.com/iwillspeak/GitHub/_apis/build/status/iwillspeak.docket?branchName=main 32 | [build_info]: https://dev.azure.com/iwillspeak/GitHub/_build/latest?definitionId=1&branchName=main 33 | [docker_badge_image]: https://github.com/iwillspeak/docket/actions/workflows/container.yml/badge.svg 34 | [docker_info]: https://github.com/iwillspeak/docket/actions/workflows/container.yml -------------------------------------------------------------------------------- /assets/dark.js: -------------------------------------------------------------------------------- 1 | const darkModePlaceholder = document.getElementById("dark-mode-placeholder"); 2 | const prefersDarkMode = window.matchMedia("(prefers-color-scheme: dark)"); 3 | 4 | const url = import.meta.url; 5 | const rootPath = url.substring(0, url.lastIndexOf('/') + 1) 6 | 7 | const sunIcon = `sun` 8 | const moonIcon = `moon` 9 | 10 | const setIconFromMode = (button, mode) => { 11 | if (mode === undefined) { 12 | mode = prefersDarkMode.matches ? "dark" : "light"; 13 | } 14 | 15 | button.innerHTML = ` 24 | 26 | ` 27 | }; 28 | 29 | const toggleMode = () => { 30 | const dataSet = document.documentElement.dataset; 31 | const currentMode = dataSet.colorMode; 32 | 33 | if (currentMode === undefined) { 34 | if (prefersDarkMode.matches) { 35 | dataSet.colorMode = "light"; 36 | } else { 37 | dataSet.colorMode = "dark"; 38 | } 39 | localStorage.setItem("stashed-theme", dataSet.colorMode); 40 | } else { 41 | delete dataSet.colorMode; 42 | localStorage.removeItem("stashed-theme"); 43 | } 44 | 45 | return dataSet.colorMode; 46 | } 47 | 48 | const stashedTheme = localStorage.getItem("stashed-theme"); 49 | if (stashedTheme) { 50 | document.documentElement.dataset.colorMode = stashedTheme; 51 | } 52 | 53 | // Create a button and add it to the DOM to allow toggling dark mode. 
54 | 55 | const darkModeButton = document.createElement("a"); 56 | darkModeButton.addEventListener("click", () => { 57 | setIconFromMode(darkModeButton, toggleMode()); 58 | }); 59 | setIconFromMode(darkModeButton, document.documentElement.dataset.colorMode); 60 | 61 | darkModePlaceholder.appendChild(darkModeButton); -------------------------------------------------------------------------------- /assets/feather-sprite.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assets/search.js: -------------------------------------------------------------------------------- 1 | // # Javascript Progressive Search for Docket 2 | // 3 | // This file loads a search index that's pre-baked by `docket` at documentation 4 | // compile time. If the search index loads we inject the search box into the 5 | // page and enable the search. 6 | 7 | const initialiseSearch = async (rootPath, targetSelector) => { 8 | const searchForm = document.querySelector(targetSelector); 9 | if (searchForm == null) { 10 | return; 11 | } 12 | 13 | searchForm.innerHTML = `

Search

14 |
15 | 16 | 17 |
18 |
`; 19 | const searchBox = searchForm.querySelector('#query'); 20 | const searchResults = searchForm.querySelector('#docket-search-results'); 21 | 22 | const searchEntryForResult = result => { 23 | return `
  • ${result.title}
  • `; 24 | } 25 | 26 | const displayResults = results => { 27 | if (results.length == 0) { 28 | searchResults.innerHTML = "

    No results

    "; 29 | } else { 30 | searchResults.innerHTML = 31 | `

    ${results.length} results

    32 | `; 35 | } 36 | } 37 | 38 | const searchIndex = await fetch(`${rootPath}search_index.json`) 39 | .then(response => response.json()); 40 | 41 | const doSearch = query => { 42 | 43 | // If the search is empty clean up. 44 | if (query.trim().length == 0) { 45 | searchResults.innerHTML = ""; 46 | return; 47 | } 48 | 49 | let terms = query.split(/\W+/) 50 | .map(term => term.trim().toLowerCase()) 51 | .filter(term => term.length > 0); 52 | let found = [] 53 | 54 | searchIndex.forEach(page => { 55 | let score = 0; 56 | terms.forEach(term => { 57 | let termScore = page.terms[term]; 58 | if (termScore !== undefined) { 59 | score += termScore; 60 | } 61 | }); 62 | if (score > 0) { 63 | found.push({ 64 | score: score, 65 | page: page, 66 | }); 67 | } 68 | }); 69 | 70 | // Order them by the score, descending. 71 | found.sort((a, b) => b.score - a.score); 72 | 73 | displayResults(found.map(f => f.page)); 74 | } 75 | 76 | let timer = null; 77 | searchBox.addEventListener('keyup', event => { 78 | if (timer !== null) { 79 | clearTimeout(timer); 80 | } 81 | timer = setTimeout(() => { 82 | timer = null; 83 | doSearch(searchBox.value); 84 | }, 500); 85 | }); 86 | 87 | searchForm.addEventListener('submit', event => { 88 | event.preventDefault(); 89 | if (timer !== null) { 90 | clearTimeout(timer); 91 | timer = null; 92 | } 93 | doSearch(searchBox.value); 94 | }); 95 | } 96 | 97 | const uri = import.meta.url; 98 | initialiseSearch(uri.substring(0, uri.lastIndexOf('/') + 1), '#docket-search') -------------------------------------------------------------------------------- /assets/style.css: -------------------------------------------------------------------------------- 1 | :root { 2 | /* Colourschemes */ 3 | 4 | /* Sharp - Pinktacular accents */ 5 | --col-accent: #ee0b74; 6 | --col-dark: #020202; 7 | --col-dark-dimmed: #262826; 8 | --col-light-dimmed: #e9e9e9; 9 | --col-light: #fbfbfb; 10 | 11 | --col-bg: var(--col-light); 12 | --col-bg-dimmed: var(--col-light-dimmed); 13 | 
--col-fg: var(--col-dark); 14 | --col-fg-dimmed: var(--col-dark-dimmed); 15 | 16 | /* FONTS */ 17 | --fnt-base-size: 16px; 18 | --fnt-base-lineheight: 1.75; 19 | --fnt-family-body: 'Montserrat', sans-serif; 20 | 21 | /* SIZES */ 22 | --sz-header-height: 45px; 23 | --sz-content-padding: 0.5rem; 24 | 25 | color-scheme: light; 26 | accent-color: var(--col-accent); 27 | } 28 | 29 | /* Apply a dark color scheme */ 30 | @media (prefers-color-scheme: dark) { 31 | :root { 32 | --col-bg: var(--col-dark); 33 | --col-bg-dimmed: var(--col-dark-dimmed); 34 | --col-fg: var(--col-light); 35 | --col-fg-dimmed: var(--col-light-dimmed); 36 | 37 | color-scheme: dark; 38 | } 39 | } 40 | 41 | /* Dark mode override */ 42 | html[data-color-mode="dark"] { 43 | --col-bg: var(--col-dark); 44 | --col-bg-dimmed: var(--col-dark-dimmed); 45 | --col-fg: var(--col-light); 46 | --col-fg-dimmed: var(--col-light-dimmed); 47 | 48 | color-scheme: dark; 49 | } 50 | 51 | /* Light mode override */ 52 | html[data-color-mode="light"] { 53 | --col-bg: var(--col-light); 54 | --col-bg-dimmed: var(--col-light-dimmed); 55 | --col-fg: var(--col-dark); 56 | --col-fg-dimmed: var(--col-dark-dimmed); 57 | 58 | color-scheme: light; 59 | } 60 | 61 | .content { 62 | margin: 0 auto; 63 | padding: 0 var(--sz-content-padding); 64 | max-width: calc(100vw - (2 * var(--sz-content-padding))); 65 | } 66 | 67 | * { 68 | box-sizing: border-box; 69 | } 70 | 71 | /* Tablets */ 72 | @media screen and (min-width: 500px) { 73 | :root { 74 | --fnt-base-size: 17px; 75 | } 76 | 77 | .doc-grid { 78 | display: grid; 79 | grid-template-areas: "sidebar content"; 80 | grid-template-columns: minmax(0, 1.5fr) minmax(0, 2fr); 81 | gap: 3rem; 82 | } 83 | 84 | .doc-grid .sidebar, 85 | .doc-grid .toc-tree { 86 | grid-area: sidebar; 87 | } 88 | 89 | .doc-grid main { 90 | grid-area: content; 91 | } 92 | } 93 | 94 | /* Desktop */ 95 | @media screen and (min-width: 980px) { 96 | :root { 97 | --fnt-base-size: 18px; 98 | } 99 | 100 | .content { 101 | 
max-width: 1440px; 102 | } 103 | 104 | .doc-grid { 105 | grid-template-areas: "sidebar content toc"; 106 | grid-template-columns: minmax(0, 1.2fr) minmax(0, 3fr) minmax(0, 240px); 107 | } 108 | 109 | .doc-grid .sidebar, 110 | .doc-grid .toc-tree { 111 | position: sticky; 112 | max-height: calc(100vh - var(--sz-header-height)); 113 | top: 15px; 114 | overflow-y: auto; 115 | } 116 | 117 | .doc-grid .toc-tree { 118 | grid-area: toc; 119 | } 120 | } 121 | 122 | body { 123 | background-color: var(--col-bg); 124 | color: var(--col-fg); 125 | margin: 0; 126 | padding: 0; 127 | font-size: var(--fnt-base-size); 128 | line-height: var(--fnt-base-lineheight); 129 | font-family: var(--fnt-family-body); 130 | text-rendering: optimizeSpeed; 131 | min-height: 100vh; 132 | display: flex; 133 | flex-direction: column; 134 | } 135 | 136 | .doc-grid { 137 | flex-grow: 1; 138 | } 139 | 140 | /* ------------------------- HEADER AND BREADCRUMBS --------------------------*/ 141 | 142 | header.site-head { 143 | border-bottom: 1px solid var(--col-bg-dimmed); 144 | } 145 | 146 | header.site-head .content { 147 | display: flex; 148 | align-items: center; 149 | } 150 | 151 | .site-head #dark-mode-placeholder { 152 | border: 1px solid var(--col-bg-dimmed); 153 | border-radius: 3px; 154 | } 155 | 156 | .site-head #dark-mode-placeholder a { 157 | color: var(--col-fg); 158 | display: flex; 159 | align-items: center; 160 | justify-content: center; 161 | flex-direction: row; 162 | padding: 5px; 163 | width: 24px; 164 | height: 24px; 165 | } 166 | 167 | .breadcrumbs { 168 | height: var(--sz-header-height); 169 | display: flex; 170 | align-items: center; 171 | flex-direction: row; 172 | flex-grow: 1; 173 | } 174 | 175 | .breadcrumbs ol { 176 | list-style-type: none; 177 | display: flex; 178 | flex-direction: row; 179 | align-items: center baseline; 180 | margin: 0; 181 | padding: 0; 182 | } 183 | 184 | .breadcrumbs ol li::after { 185 | content: "/"; 186 | margin: 0 10px; 187 | } 188 | 189 | 
.breadcrumbs ol :first-child a { 190 | font-size: 1.1rem; 191 | font-weight: bold; 192 | } 193 | 194 | /* ------------------------------- TOC TREES ---------------------------------*/ 195 | 196 | /* in the sidebar */ 197 | .site-nav ul, 198 | .toc-tree ul { 199 | list-style-type: none; 200 | padding: 0; 201 | } 202 | 203 | .site-nav ul ul { 204 | padding-left: 1rem; 205 | } 206 | 207 | .toc-tree ul li li { 208 | margin-left: 1rem; 209 | } 210 | 211 | .toc-tree li { 212 | border-left: 3px solid var(--col-bg-dimmed); 213 | } 214 | 215 | .toc-tree li a { 216 | display: block; 217 | padding: 0.3rem 0 0.3rem 1rem; 218 | color: var(--col-fg-dimmed); 219 | } 220 | 221 | .toc-tree li a:hover { 222 | color: var(--col-accent); 223 | } 224 | 225 | /* general TOC */ 226 | 227 | ul.toc { 228 | list-style-type: none; 229 | padding: 0; 230 | } 231 | 232 | ul.toc ul.toc { 233 | padding-left: 1.5rem; 234 | } 235 | 236 | /* ------------------------------ MAIN ARTICLE -------------------------------*/ 237 | 238 | h1, 239 | h2, 240 | h3, 241 | h4, 242 | h5, 243 | h6 { 244 | margin: 1em 0 0 0; 245 | font-weight: bold; 246 | } 247 | 248 | h1 { 249 | font-size: 1.5rem; 250 | } 251 | 252 | h2 { 253 | font-size: 1.2rem; 254 | } 255 | 256 | h3 { 257 | font-size: 1.1rem; 258 | } 259 | 260 | h4, 261 | h5, 262 | h6 { 263 | font-size: 0.8rem; 264 | } 265 | 266 | img { 267 | max-width: 100%; 268 | display: block; 269 | margin: 0 auto; 270 | } 271 | 272 | a { 273 | color: var(--col-accent); 274 | text-decoration: none; 275 | } 276 | 277 | a:hover { 278 | color: var(--col-accent); 279 | text-decoration: underline; 280 | } 281 | 282 | h1 a, 283 | h2 a, 284 | h3 a, 285 | h4 a, 286 | h5 a, 287 | h6 a { 288 | color: var(--col-fg); 289 | } 290 | 291 | table { 292 | width: 100%; 293 | border-collapse: collapse; 294 | } 295 | 296 | thead { 297 | border-bottom: 1px solid var(--col-fg-dimmed); 298 | } 299 | 300 | td { 301 | border-bottom: 1px solid var(--col-bg-dimmed); 302 | } 303 | 304 | th, 305 | td { 
306 | padding: 10px 0; 307 | text-align: left; 308 | } 309 | 310 | blockquote { 311 | padding: 0 0.5rem; 312 | margin: 0 -0.5rem; 313 | border-left: 3px solid var(--col-fg-dimmed); 314 | font-style: italic; 315 | } 316 | 317 | ul, 318 | ol { 319 | padding: 0; 320 | } 321 | 322 | /* ---------------------- SYNTAX HIGHLIGHING & CODEBLOCKS --------------------*/ 323 | 324 | code, 325 | pre { 326 | font-family: Monospace; 327 | background-color: var(--col-bg-dimmed); 328 | border-radius: 2px; 329 | } 330 | 331 | code { 332 | padding: 1px 7px; 333 | } 334 | 335 | pre { 336 | margin: 1em 0 1em -0.5rem; 337 | border-top: none; 338 | border-left: 2px solid var(--col-accent); 339 | border-bottom: 1px solid var(--col-bg-dimmed); 340 | border-right: 1px solid var(--col-bg-dimmed); 341 | display: block; 342 | overflow: auto; 343 | padding: 0.5rem; 344 | } 345 | 346 | /* --------------------------------- FOOTER ----------------------------------*/ 347 | 348 | footer { 349 | margin-top: 1.5rem; 350 | color: var(--col-fg-dimmed); 351 | border-top: 1px solid var(--col-bg-dimmed); 352 | } 353 | 354 | footer .content { 355 | padding: 10px 0; 356 | display: flex; 357 | flex-wrap: wrap; 358 | gap: 15px; 359 | flex-direction: row; 360 | align-items: baseline; 361 | } 362 | 363 | /* If there is only one item, center things */ 364 | footer .content> :only-child { 365 | text-align: center; 366 | } 367 | 368 | footer .content>* { 369 | flex-grow: 1; 370 | } 371 | 372 | footer .content li { 373 | list-style-type: none; 374 | } 375 | 376 | /* --------------------------------- SEARCH ----------------------------------*/ 377 | 378 | #docket-search form { 379 | text-align: center; 380 | margin: 15px auto; 381 | border: 1px solid var(--col-fg); 382 | border-radius: 4px; 383 | display: flex; 384 | flex-direction: row; 385 | } 386 | 387 | #docket-search form #query, 388 | #docket-search form button { 389 | padding: 5px; 390 | background: none; 391 | color: var(--col-fg); 392 | border: none; 393 | 
flex-grow: 0; 394 | flex-shrink: 1; 395 | } 396 | 397 | #docket-search form #query { 398 | min-width: 100px; 399 | flex-grow: 1; 400 | } 401 | 402 | #docket-search form button { 403 | font-weight: bold; 404 | } -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | pool: 2 | vmImage: 'ubuntu-latest' 3 | container: 'rust:latest' 4 | steps: 5 | - script: cargo test --all 6 | displayName: Cargo test 7 | - script: cargo test --all --no-default-features 8 | displayName: Cargo test no features -------------------------------------------------------------------------------- /docs/01-installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | Docket is written in Rust and available on Crates.io. To install it globally use `cargo`: 4 | 5 | ```bash 6 | $ cargo install docket 7 | ``` 8 | -------------------------------------------------------------------------------- /docs/02-useage.md: -------------------------------------------------------------------------------- 1 | # Usage 2 | 3 | To convert a folder full of markdown into a static HTML site is as simple as 4 | `docket`. By default the current directory is searched for markdown and the 5 | target directory where the site is rendered is `./build`. The source and target 6 | directory can be overridden with `-s` and `-t`: 7 | 8 | ```nohilight 9 | Docket Documentation Generator 10 | 11 | Usage: docket [options] 12 | 13 | Options: 14 | -h --help Show this screen. 15 | -w, --watch Watch for changes and re-generate. 16 | -s, --source= Documentation directory, default is current directory. 17 | -t, --target= Write the output to , default is `./build/`. 18 | ``` 19 | 20 | Further configuration is deliberately left impossible. 
The aim is to provide a 21 | simple way to create documentation without any configuration or theming to 22 | provide distractions. 23 | 24 | ## Logging 25 | 26 | By default `docket` keeps quiet about what it does. You can override this by 27 | setting the environment variables `DOCKET_LOG` and `DOCKET_LOG_STYLE` to [an 28 | `env_logger` compatible value][envlog]. 29 | 30 | [envlog]: https://docs.rs/env_logger/latest/env_logger/fn.init_from_env.html#examples -------------------------------------------------------------------------------- /docs/03-configuration.md: -------------------------------------------------------------------------------- 1 | # Configuration 2 | 3 | Docket aims to replicate the features of `d`. Although this is built around limitation of configuration there are still a few settings that can be tweaked. 4 | 5 | [TOC] 6 | 7 | ## Documentation Files 8 | 9 | Each file in the source directory with the extension of `md`, `mdown` or `markdown` is rendered as a page. 10 | 11 | ## Page Uris 12 | 13 | The Uri of a page is created from the file name by stripping leading numbers and replacing non-uri-safe characters with `-`. This allows leading numbers to be added to input file names to control the order in which pages appear in the main index. 14 | 15 | ## Page Titles 16 | 17 | Pages should start with a level-1 markdown heading. This heading will be used as the page title. Pages without such a heading are named their file names. 18 | 19 | ## Index 20 | 21 | To add content to the index page create a file called `index.md`. This content is rendered before the table of contents on the main page. The heading on the index page is the main documentation title. 22 | 23 | ## Documentation Title 24 | 25 | The title of the documentation is the first folder in the path which isn't one of `docs` or `documentation`. To override this create a file called `title` with the title in it. 
26 | 27 | ## Footer 28 | 29 | The contents of `footer.md` will be added to the base of every page. 30 | -------------------------------------------------------------------------------- /docs/04-markdown-features.md: -------------------------------------------------------------------------------- 1 | # Markdown Features 2 | 3 | Markdown rendering is based on the [CommonMark](http://commonmark.org) spec and rendered by [`Pulldown`](https://crates.io/crates/pulldown-cmark). There are, however, a few extensions. 4 | 5 | ## Tree of Contents 6 | 7 | When placed on its own in a paragraph `[TOC]` is replaced with a rendered tree based on the headings within the current page. 8 | 9 | ## Highlighting 10 | 11 | Code blocks are highlighted with [Syntect](https://crates.io/crates/syntect) or 12 | [`highlight.js`](https://highlightjs.org). Syntect is the default. Syntect can 13 | be disabled at compile / install time by not including the `syntect-hl` feature, 14 | or at documentation build time by setting the `DOCKET_FORCE_JS_HL` environment 15 | variable. 16 | 17 | By default the type of each block is inferred automatically. Fenced code blocks 18 | can be used to add a hint about the type of code: 19 | 20 | :::nohighlight 21 | ```py 22 | def foo(): 23 | pass 24 | ``` 25 | 26 | Which renders as: 27 | 28 | ```py 29 | def foo(): 30 | pass 31 | ``` 32 | 33 | In addition the first line of the code block can specify a type hint if it begins with `:::`: 34 | 35 | ```nohighlight 36 | :::c 37 | #include 38 | 39 | void main() { 40 | printf("hello world!"); 41 | } 42 | ``` 43 | 44 | Which renders as: 45 | 46 | :::c 47 | #include 48 | 49 | void main() { 50 | printf("hello world!"); 51 | } 52 | 53 | ## Tables 54 | 55 | Tables are supported. Markdown tables are defined with `|` and `---` using the 56 | GFM tables extension. 
57 | 58 | ```markdown 59 | Hello | Wolrd 60 | ------| ----- 61 | TESt | thing 62 | 123 | `123` 63 | more | *stuff* 64 | test | sfasf 65 | ``` 66 | 67 | Which renders as: 68 | 69 | Hello | Wolrd 70 | ------| ----- 71 | TESt | thing 72 | 123 | `123` 73 | more | *stuff* 74 | test | sfasf 75 | 76 | ## Task Lists 77 | 78 | Task lists are supported with the syntax from GFM. 79 | 80 | ``` 81 | * [ ] Milk 82 | * [x] Eggs 83 | * [x] Flour 84 | ``` 85 | 86 | Which renders as: 87 | 88 | * [ ] Milk 89 | * [x] Eggs 90 | * [x] Flour 91 | -------------------------------------------------------------------------------- /docs/design-documents/01-new-design.md: -------------------------------------------------------------------------------- 1 | # New Layout Design 2 | 3 | This design document aims to sum up my current thoughts and feelings about where I'd like this project to go. I feel that for medium sized projects Docket is starting to show the roughness around some edges. The plan is to allow docket to walk a tree of directories rather than just processing a single set of files from a single directory. 4 | 5 | ## Documentation Directory Layout 6 | 7 | Currently Docket searches for markdown files in a single folder. Files are sorted asciibetically, and the prefix on the filenames is stripped to generate each page's slug. To extend this to directories we can treat them in the same way. A directory will be examined for markdown contents. If markdown files are found then we generate a slug and order it in the same way we would a page. When rendering we can then recursively walk the contents of the folder. 
Each rendered page contains three main items: content, TOC, and navigation. The content of the page is mainly the rendered markdown for the document, or the markdown of the bale's index. The TOC contains the overview of the current page. It should contain the H1 and H2 headings in the current document. The last item is the navigation tree. This tree contains: The bales that are at the same level in the tree as the current page's bale, and the pages within the current document's bale.
as a hierarchy of nodes and leaves that
we want to transform a `Vec<Event<'a>>` into
We support two highlighting modes: js, and Syntect.
fall back to some default highlight mode.
Pages should implement some trait that allows them to be queried for items to
    ` 43 | which represents the page's content, and some `
      ` that represents the TOC 44 | tree. 45 | 46 | With the page's responsibilities tied down this way it should allow us to 47 | abstract away actual layout to some _third_ item. I'm calling this the Layout. 48 | When a render happens we'll need to provide some layout. This will probably live 49 | on the render ctx. Layouts are given access to a file that's open for write, the 50 | current render state, and the renderable. A layout's responsibility is solely 51 | that of putting HTML bits and bytes onto a page. 52 | 53 | ## Libraries 54 | 55 | Seprating rendering, layout, and renderables like this allows _some_ portion of 56 | `docket` to be moved into a library. A client would then be able to create a 57 | `docket` from any directory, make a `render_ctx`, and render to a target 58 | location. A builder API for render conntexts along with making the 59 | `Highlighter`, and `Layout` traits public would allow a user to inject a custom 60 | layout. 61 | 62 | ## Printing of Things 63 | 64 | The renderable trait that is passed to a `Layout` should expose the toc, title, 65 | and content of the document. We can expose these as opaque items which implement 66 | the `Display` trait. That way we can format directly into whatever target buffer 67 | is being written to. 68 | 69 | E.g.: 70 | 71 | ```rust 72 | trait Renderable { 73 | fn toc(&'a self, ctx: &'b RenderCtx) -> Toc<'a, 'b>; 74 | fn content(&'a self, ctx: &'b RenderCtx) -> Content<'a, 'b>; 75 | } 76 | 77 | type Content<'a> { 78 | toc: &'a TocTree; 79 | ctx: &'b RenderCtx; 80 | } 81 | 82 | impl<'a> Display for Content<'a> { 83 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 84 | // Create a highlighter iterator using the current highlighter, consuming 85 | // events from our main TOC. Pull those events down into Pulldown and have 86 | // it render directly to the output stream. 
outlining the proposed structure of
-------------------------------------------------------------------------------- /docs/img/example_asset.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iwillspeak/docket/dfc69435a073dd27e1668eda4241e4b9f908bc8e/docs/img/example_asset.png -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Docket 2 | 3 | > Just document it 4 | 5 | Simple markdown to HTML documentation rendering. Docket aims to be a Rust clone of [`d`](https://github.com/sjl/d). 6 | 7 | Docket is open source on [GitHub](http://github.com/iwillspeak/docket). This site was rendered by `docket` from [the `docs/` folder](https://github.com/iwillspeak/docket/tree/main/docs). 8 | -------------------------------------------------------------------------------- /src/args.rs: -------------------------------------------------------------------------------- 1 | //! Command Line Argument Parsing 2 | //! 3 | //! This module defines the command line usage text and provides a method to 4 | //! load the arguments from the command line. 5 | 6 | use docopt::Docopt; 7 | use serde::Deserialize; 8 | 9 | /// Usage Information 10 | /// 11 | /// This is a [Docopt] compliant usage description of this program. 12 | /// 13 | /// [Docopt]: http://docopt.org/ 14 | const USAGE: &str = " 15 | Docket Documentation Generator 16 | 17 | Usage: docket [options] 18 | 19 | Options: 20 | --version Show the version. 21 | -h --help Show this screen. 22 | -s, --source= Documentation directory, default is current directory. 23 | -t, --target= Write the output to , default is `./build/`. 24 | -w, --watch Watch for changes and re-generate. 25 | "; 26 | 27 | /// Program Arguments 28 | /// 29 | /// Structure to capture the command line arguments for the program. This is 30 | /// filled in for us by Docopt. 
31 | #[derive(Debug, Deserialize)] 32 | pub(crate) struct Args { 33 | pub flag_watch: bool, 34 | pub flag_source: Option, 35 | pub flag_target: Option, 36 | } 37 | 38 | /// Parse the arguments from a given command line 39 | pub(crate) fn from_argv, S: AsRef>( 40 | argv: I, 41 | ) -> Result { 42 | Docopt::new(USAGE).and_then(|d| { 43 | d.argv(argv) 44 | .help(true) 45 | .version(Some(format!("Docket {}", env!("CARGO_PKG_VERSION")))) 46 | .deserialize() 47 | }) 48 | } 49 | 50 | /// Get the command line arguments 51 | pub(crate) fn from_command_line() -> Args { 52 | let argv = std::env::args(); 53 | from_argv(argv).unwrap_or_else(|e| e.exit()) 54 | } 55 | 56 | #[cfg(test)] 57 | mod test { 58 | use super::from_argv; 59 | 60 | #[test] 61 | fn parse_empty_argv() { 62 | let args = from_argv::<&[String], &String>(&[]).unwrap(); 63 | assert_eq!(None, args.flag_source); 64 | assert_eq!(None, args.flag_target); 65 | assert_eq!(false, args.flag_watch); 66 | 67 | let args = from_argv(&["docket"]).unwrap(); 68 | assert_eq!(None, args.flag_source); 69 | assert_eq!(None, args.flag_target); 70 | assert_eq!(false, args.flag_watch); 71 | } 72 | 73 | #[test] 74 | fn parse_commands_argv() { 75 | let args = ["docket", "-s", "/some//.path", "-t", "../another", "-w"]; 76 | let args = from_argv(args); 77 | let args = args.unwrap(); 78 | assert_eq!(Some("/some//.path"), args.flag_source.as_deref()); 79 | assert_eq!(Some("../another"), args.flag_target.as_deref()); 80 | assert_eq!(true, args.flag_watch); 81 | } 82 | 83 | #[test] 84 | fn parse_long_form_options() { 85 | let args = [ 86 | "docket", 87 | "--target=../another", 88 | "--watch", 89 | "--source", 90 | "/some/Path with Spaces/", 91 | ]; 92 | let args = from_argv(args); 93 | let args = args.unwrap(); 94 | assert_eq!(Some("/some/Path with Spaces/"), args.flag_source.as_deref()); 95 | assert_eq!(Some("../another"), args.flag_target.as_deref()); 96 | assert_eq!(true, args.flag_watch); 97 | } 98 | } 99 | 
-------------------------------------------------------------------------------- /src/asset.rs: -------------------------------------------------------------------------------- 1 | //! Non-Renderable Assets 2 | 3 | //! 4 | //! This module models the other files in a given docs directory which 5 | //! need to be copied to the output. We use this to abstract between 6 | //! 'bulitin' assets, such as the CSS which is bundled with Docket 7 | //! itself and on-disk assets from the source directory. 8 | 9 | use log::warn; 10 | 11 | use super::Result; 12 | use std::fs::{self, File}; 13 | use std::io::prelude::*; 14 | use std::path::{Path, PathBuf}; 15 | 16 | /// Defines a Documentation Asset 17 | /// 18 | /// Assets encompas non-markdown files which are copied to the target 19 | /// directory to support the rendered markdown files. 20 | #[derive(Debug)] 21 | pub enum Asset { 22 | /// On-Disk Asset 23 | /// 24 | /// On disk assets come from the source directory and are file-copied 25 | /// to the output when the asset is rendered. 26 | Disk(PathBuf), 27 | 28 | /// Internal Asset 29 | /// 30 | /// Internal assets represent fixed strings which are copied to a 31 | /// named file in the output directory when rendered. 32 | Internal(InternalAsset), 33 | } 34 | 35 | /// Internal Asset 36 | /// 37 | /// Defines the fixed contents for an internal asset. 
38 | #[derive(Debug)] 39 | pub struct InternalAsset { 40 | /// The contents of the asset 41 | contents: &'static str, 42 | /// The file name to create 43 | name: &'static str, 44 | } 45 | 46 | impl Asset { 47 | /// Create an Internal Asset 48 | /// 49 | /// # Parameters 50 | /// * `name` - The name of the file to create in the output 51 | /// * `contents` - The contents to fill the file with 52 | pub const fn internal(name: &'static str, contents: &'static str) -> Self { 53 | Asset::Internal(InternalAsset { name, contents }) 54 | } 55 | 56 | /// Create a Path Asset 57 | /// 58 | /// # Parameters 59 | /// * `path` - The source path 60 | pub const fn path(path: PathBuf) -> Self { 61 | Asset::Disk(path) 62 | } 63 | 64 | /// Copy To 65 | /// 66 | /// This method is called to copy a given asset to the output 67 | /// directory. 68 | pub fn copy_to(&self, output: &Path) -> Result<()> { 69 | match self { 70 | Asset::Internal(int) => { 71 | let path = output.join(int.name); 72 | let mut file = File::create(&path)?; 73 | write!(file, "{}", int.contents)?; 74 | Ok(()) 75 | } 76 | Asset::Disk(path) => copy_single(&path, output), 77 | } 78 | } 79 | } 80 | 81 | fn copy_single(path: &Path, target: &Path) -> Result<()> { 82 | if path.is_dir() { 83 | let mut target = PathBuf::from(target); 84 | target.push(path.file_name().unwrap()); 85 | if !target.exists() { 86 | fs::create_dir(&target)?; 87 | } 88 | copy_recurse(path, &target)?; 89 | } else if let Some(name) = path.file_name() { 90 | fs::copy(path, target.join(name))?; 91 | } else { 92 | warn!("Asset at {:?} does not appear to be copyable", path); 93 | } 94 | Ok(()) 95 | } 96 | 97 | fn copy_recurse(source: &Path, target: &Path) -> Result<()> { 98 | for entry in fs::read_dir(source)? 
{ 99 | let entry = entry?; 100 | let path = entry.path(); 101 | copy_single(&path, target)?; 102 | } 103 | Ok(()) 104 | } 105 | -------------------------------------------------------------------------------- /src/docket.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs, io, 3 | path::{self, Path}, 4 | }; 5 | 6 | use log::trace; 7 | 8 | use crate::{ 9 | doctree::{self, Bale}, 10 | error::{Error, Result as DocketResult}, 11 | render, 12 | }; 13 | 14 | /// Docket 15 | /// 16 | /// Represents a documentation set. Responsible for opening the bales of 17 | /// documentation and traversing them to render out to HTML. 18 | #[derive(Debug)] 19 | pub struct Docket { 20 | title: String, 21 | doctree_root: Bale, 22 | } 23 | 24 | impl Docket { 25 | /// Open the given `path` as a documentaiton collection 26 | /// 27 | /// Once opned a documentaiton collection has a title, and can be rendreed 28 | /// to a target path. 29 | pub fn open>(path: P) -> DocketResult { 30 | if !path.as_ref().is_dir() { 31 | Err(Error::SourcePathNotADirectory(path.as_ref().into()))?; 32 | } 33 | Ok(Docket { 34 | title: title_from_path(path.as_ref())?, 35 | doctree_root: doctree::open(&path)?, 36 | }) 37 | } 38 | 39 | /// Render to HTML 40 | /// 41 | /// Renders the documentation set. Creates a tree of HTML files into the 42 | /// given `target` directory. 43 | pub fn render>(self, target: P) -> DocketResult<()> { 44 | trace!( 45 | "Rendering documentation for {} to {:?}", 46 | self.title, 47 | target.as_ref() 48 | ); 49 | render::render(target, self.title, self.doctree_root)?; 50 | Ok(()) 51 | } 52 | } 53 | 54 | /// Calculate the title of the documentation set from the given path. 55 | fn title_from_path(path: &Path) -> std::result::Result { 56 | let title_file = path.join("title"); 57 | Ok(if title_file.is_file() { 58 | fs::read_to_string(title_file)? 59 | } else { 60 | Path::canonicalize(path)? 
61 | .components() 62 | .filter_map(|c| match c { 63 | path::Component::Normal(path) => path.to_owned().into_string().ok(), 64 | _ => None, 65 | }) 66 | .filter(|s| s != "docs") 67 | .last() 68 | .unwrap_or_else(|| String::from("Documentation")) 69 | }) 70 | } 71 | -------------------------------------------------------------------------------- /src/doctree.rs: -------------------------------------------------------------------------------- 1 | //! The Documentation Tree 2 | //! 3 | //! This module defines the types that model out the tree of documentation to be 4 | //! rendered. The tree is defined in such a way that it can be lazily traversed 5 | //! to avoid pulling the whole thing into memory at the beginning of rendering. 6 | //! 7 | //! The documentation tree is made up of two things: bales and pages. Bales form 8 | //! the interior nodes of the tree, and pages the leaves. 9 | 10 | use std::{ 11 | borrow::Borrow, 12 | fs, 13 | path::{Path, PathBuf}, 14 | result, 15 | }; 16 | 17 | use log::info; 18 | 19 | use crate::{ 20 | asset::Asset, 21 | error::Result, 22 | search, 23 | toc::Toc, 24 | utils::{self, slugify_path}, 25 | }; 26 | 27 | /// A Doctree Item 28 | /// 29 | /// Represents the kinds of item that can appear within the doctree. 30 | pub(crate) enum DoctreeItem { 31 | /// A leaf page 32 | Page(Page), 33 | 34 | /// An unopened bale 35 | Bale(Bale), 36 | } 37 | 38 | /// A Documentation Page 39 | /// 40 | /// Each documentaiton page is mad eup of two items: a simple `slug` which 41 | /// should be used to refer to the page in the navigation tree, and a TOC. The 42 | /// TOC is a heirachical represenattion of the contents of the page. A TOC can 43 | /// be traversed to inspect the structure of the page; and rendered to HTML. 
44 | #[derive(Debug)] 45 | pub(crate) struct Page { 46 | slug: String, 47 | title: String, 48 | tree: Toc, 49 | } 50 | 51 | impl search::SearchableDocument for Page { 52 | /// Get the title for this page 53 | fn title(&self) -> &str { 54 | &self.title 55 | } 56 | 57 | /// Get the slug for this page 58 | fn slug(&self) -> &str { 59 | &self.slug 60 | } 61 | 62 | /// Get the search index for the given page 63 | fn search_index(&self) -> Option<&search::TermFrequenciesIndex> { 64 | Some(&self.content().search_index()) 65 | } 66 | } 67 | 68 | impl Page { 69 | /// Open a Page 70 | /// 71 | /// Loads the contents of the given file and parses it as markdown. 72 | pub fn open>(path: P) -> result::Result { 73 | let markdown = fs::read_to_string(&path)?; 74 | Ok(Self::from_parts(path, markdown)) 75 | } 76 | 77 | /// Construct a Page from Constituent Parts 78 | /// 79 | /// Builds the TOC tree for the given page, and returns the opened and 80 | /// parsed page. 81 | fn from_parts, M: Borrow>(path: P, markdown: M) -> Self { 82 | let slug = utils::slugify_path(&path); 83 | let tree = Toc::new(markdown.borrow()); 84 | let title = tree.primary_heading().cloned().unwrap_or_else(|| { 85 | path.as_ref() 86 | .file_stem() 87 | .unwrap() 88 | .to_string_lossy() 89 | .into_owned() 90 | }); 91 | Page { slug, title, tree } 92 | } 93 | 94 | /// Get the title for this page 95 | pub fn title(&self) -> &str { 96 | &self.title 97 | } 98 | 99 | /// Get the slug for this page 100 | pub fn slug(&self) -> &str { 101 | &self.slug 102 | } 103 | 104 | /// Get the content 105 | pub fn content(&self) -> &Toc { 106 | &self.tree 107 | } 108 | } 109 | 110 | /// An Unopened Bale 111 | /// 112 | /// Represents an interior node in the tree of documentation. Bales contain an 113 | /// index page, a number of child pages, and a number of assets. Assets are 114 | /// carried with the bale to ensure that relative paths to images and other 115 | /// items are preserved. 
116 | /// 117 | /// An unopend bale can be broken open to access the inner `DoctreeItem`s within 118 | /// the bale. Until opened a `Bale` only performs a shallow traversal of its 119 | /// directory. This restrict sthe information avilable to just that needed to 120 | /// build the bale's own navigation item within the navigation tree. 121 | #[derive(Debug)] 122 | pub(crate) struct Bale { 123 | /// The frontispiece for this bale. This contains the bale's resolved slug 124 | /// and title to be used in navigation. 125 | frontispiece: Frontispiece, 126 | /// The paths we suspect to be page items 127 | pages: Vec, 128 | /// The paths we know to be assets 129 | assets: Vec, 130 | /// The paths we susepct to be child bales 131 | nested: Vec, 132 | } 133 | 134 | impl Bale { 135 | /// Create a new Bale 136 | /// 137 | /// Wraps the given `path` as a bale. This performs a shallow traversal of 138 | /// the directory to find the index to produce the `Frontispiece`. The full 139 | /// contents of the bale can be retrieved by `Bale::break_open`. 140 | pub fn new>(path: P) -> Result { 141 | let mut index = None; 142 | let mut footer = None; 143 | let mut pages = Vec::new(); 144 | let mut assets = Vec::new(); 145 | let mut nested = Vec::new(); 146 | 147 | // Walk the items in the directory and collect them into the initial 148 | // unsorted bale contents. We're just using raw paths at this point to refer 149 | // to all the bale's contents. 150 | for entry in fs::read_dir(&path)? 
{ 151 | let entry = entry?; 152 | let path = entry.path().clone(); 153 | 154 | if path.is_file() { 155 | match utils::normalised_path_ext(&path).as_deref() { 156 | Some("md" | "markdown" | "mdown") => { 157 | match utils::normalised_stem(&path).as_deref() { 158 | Some("index" | "readme") => index = Some(path), 159 | Some("footer") => footer = Some(fs::read_to_string(path)?), 160 | _ => pages.push(path), 161 | } 162 | } 163 | _ => assets.push(path), 164 | } 165 | } else { 166 | nested.push(path); 167 | } 168 | } 169 | 170 | // If we have an index item in this bale then open the page. We need this to 171 | // know the bale's intended title for navigation purposes. 172 | let index = match index { 173 | Some(path) => Some(Page::open(path)?), 174 | None => None, 175 | }; 176 | 177 | Ok(Bale { 178 | frontispiece: Frontispiece::new(path, index, footer), 179 | pages, 180 | assets, 181 | nested, 182 | }) 183 | } 184 | 185 | /// Break Open the Bale 186 | /// 187 | /// This reifies the contents of the bale. Inner items are converted into 188 | /// real pages and bales. 189 | pub fn break_open(self) -> Result<(Frontispiece, Vec, Vec)> { 190 | info!( 191 | "Breaking open bale {} ({})", 192 | self.frontispiece.title, 193 | self.frontispiece.slug(), 194 | ); 195 | 196 | let mut assets: Vec<_> = self.assets.into_iter().map(Asset::path).collect(); 197 | let mut items = Vec::with_capacity(self.pages.len() + self.nested.len()); 198 | 199 | for page in self.pages { 200 | items.push(( 201 | utils::normalised_stem(&page), 202 | DoctreeItem::Page(Page::open(page)?), 203 | )); 204 | } 205 | 206 | for nested in self.nested { 207 | let bale = Bale::new(&nested)?; 208 | if bale.frontispiece.index.is_none() && bale.pages.is_empty() { 209 | info!( 210 | "Inner item {:?} does not appear to be able. 
Adding as an asset", 211 | &nested 212 | ); 213 | assets.push(Asset::path(nested)); 214 | } else { 215 | items.push((utils::normalised_stem(&nested), DoctreeItem::Bale(bale))); 216 | } 217 | } 218 | 219 | // Sort the items by their origional path. This allows files on disk to 220 | // be given a prefix that is stripped off in slugification but still 221 | // affects the item's order within the documentation tree. 222 | items.sort_by_cached_key(|(k, _)| k.clone()); 223 | 224 | Ok(( 225 | self.frontispiece, 226 | assets, 227 | items.into_iter().map(|(_, i)| i).collect(), 228 | )) 229 | } 230 | 231 | /// Get the Frontispiece for this bale 232 | pub(crate) fn frontispiece(&self) -> &Frontispiece { 233 | &self.frontispiece 234 | } 235 | } 236 | 237 | /// Bale Frontispiece 238 | /// 239 | /// The frontispiece represents the eagerly loaded portion of the bale. Bales 240 | /// are broken open into three parts: frontispiece, assets, and inner items. 241 | /// This type is used to group together the index. 242 | #[derive(Debug)] 243 | pub(crate) struct Frontispiece { 244 | /// The title for this bale. This is from the index page, if there is one, 245 | /// or falls back to the directory name otherwise. 246 | title: String, 247 | 248 | /// The slug for this bale 249 | /// 250 | /// TODO: Do we want a special `Slug` type to wrap these? 251 | slug: String, 252 | 253 | /// Index page for the bale, if one exists 254 | index: Option, 255 | 256 | /// The footer information for this bale. Rendering of any nested pages 257 | /// should use this as the markdown for the page's footer. 258 | footer: Option, 259 | } 260 | 261 | impl Frontispiece { 262 | /// Create a new Frontispiece 263 | /// 264 | /// This picks a title and slug for the bale based on the bale's path. 
265 | fn new>(path: P, index: Option, footer: Option) -> Frontispiece { 266 | let title = match &index { 267 | Some(page) => page.title.clone(), 268 | None => utils::prettify_dir(&path).expect("Could not create a title"), 269 | }; 270 | let footer = footer.map(|text| { 271 | let mut output = String::new(); 272 | pulldown_cmark::html::push_html(&mut output, pulldown_cmark::Parser::new(&text)); 273 | output 274 | }); 275 | Frontispiece { 276 | title, 277 | slug: slugify_path(path), 278 | index, 279 | footer, 280 | } 281 | } 282 | 283 | /// Get the bale's slug 284 | pub fn slug(&self) -> &str { 285 | &self.slug 286 | } 287 | 288 | /// Get the bale's title 289 | pub fn title(&self) -> &str { 290 | &self.title 291 | } 292 | 293 | /// Get a reference to the index page of this bale, if any 294 | pub fn index_page(&self) -> Option<&Page> { 295 | self.index.as_ref() 296 | } 297 | 298 | /// Get the page's footer 299 | pub fn footer(&self) -> Option<&str> { 300 | self.footer.as_deref() 301 | } 302 | } 303 | 304 | /// Open a Doctree 305 | /// 306 | /// This tries to create a new doctree rooted at the given `path`. If the path 307 | /// can be opened and loaded as a valid `Bale` then that `Bale` is returned. If 308 | /// there was an error initialising the doctree that failure is propagated. 
309 | pub(crate) fn open>(path: P) -> Result { 310 | Bale::new(path) 311 | } 312 | 313 | #[cfg(test)] 314 | mod test { 315 | 316 | use super::*; 317 | use std::path::PathBuf; 318 | 319 | #[test] 320 | fn page_has_search_terms() { 321 | let path = PathBuf::from("foo/bar.md"); 322 | let page = Page::from_parts(&path, "Some sample text in some text"); 323 | 324 | let index = page.content().search_index().as_raw(); 325 | assert_ne!(0, index.len()); 326 | let some_fq = index.get("some").cloned().unwrap_or_default(); 327 | let sample_fq = index.get("sample").cloned().unwrap_or_default(); 328 | let text_fq = index.get("text").cloned().unwrap_or_default(); 329 | assert_eq!(some_fq, text_fq); 330 | assert!(some_fq > sample_fq); 331 | } 332 | 333 | #[test] 334 | fn index_of_example_markdown() { 335 | let path = PathBuf::from("foo/bar.md"); 336 | let page = Page::from_parts( 337 | &path, 338 | r###" 339 | 340 | # Down the Rabbit Hole 341 | 342 | Either the well was very deep, or she fell very slowly, for she had 343 | plenty of time as she went down to look about her, and to wonder what 344 | was going to happen next. First, she tried to look down and make out 345 | what she was coming to, but it was too dark to see anything; then she 346 | looked at the sides of the well and noticed that they were filled with 347 | cupboards and book-shelves: here and there she saw maps and pictures 348 | hung upon pegs. She took down a jar from one of the shelves as she 349 | passed; it was labelled "ORANGE MARMALADE," but to her disappointment it 350 | was empty; she did not like to drop the jar for fear of killing 351 | somebody underneath, so managed to put it into one of the cupboards as 352 | she fell past it. 353 | 354 | "Well!" thought Alice to herself. "After such a fall as this, I shall 355 | think nothing of tumbling down stairs! How brave they'll all think me at 356 | home! Why, I wouldn't say anything about it, even if I fell off the top 357 | of the house!" 
(Which was very likely true.) 358 | 359 | ### Going Down? 360 | 361 | Down, down, down. Would the fall _never_ come to an end? "I wonder how 362 | many miles I've fallen by this time?" she said aloud. "I must be getting 363 | somewhere near the centre of the earth. Let me see: that would be four 364 | thousand miles down. I think--" (for, you see, Alice had learnt several 365 | things of this sort in her lessons in the schoolroom, and though this 366 | was not a _very_ good opportunity for showing off her knowledge, as 367 | there was no one to listen to her, still it was good practice to say it 368 | over) "--yes, that's about the right distance--but then I wonder what 369 | Latitude or Longitude I've got to?" (Alice had no idea what Latitude 370 | was, or Longitude either, but thought they were nice grand words to 371 | say.) 372 | 373 | "###, 374 | ); 375 | 376 | assert_eq!("Down the Rabbit Hole", page.title); 377 | 378 | // Check some of the relative frequencies of terms 379 | let index = page.content().search_index().as_raw(); 380 | assert_ne!(0, index.len()); 381 | let rabbit_fq = index.get("rabbit").cloned().unwrap_or_default(); 382 | assert!(rabbit_fq > 0.0); 383 | let well_fq = index.get("well").cloned().unwrap_or_default(); 384 | assert!(well_fq > rabbit_fq); 385 | assert_eq!( 386 | index.get("distance").cloned().unwrap_or_default(), 387 | rabbit_fq 388 | ); 389 | assert!(index.get("down").cloned().unwrap_or_default() > well_fq); 390 | 391 | // Check terms are downcased 392 | assert_ne!(None, index.get("orange")); 393 | assert_eq!(None, index.get("MARMALADE")); 394 | 395 | // check we don't have any whitespace or other junk symbols in the index 396 | assert_eq!(None, index.get("")); 397 | assert_eq!(None, index.get("!")); 398 | assert_eq!(None, index.get("-")); 399 | assert_eq!(None, index.get(" ")); 400 | assert_eq!(None, index.get("\t")); 401 | assert_eq!(None, index.get("(")); 402 | } 403 | } 404 | 
--------------------------------------------------------------------------------
/src/error.rs:
--------------------------------------------------------------------------------
use std::{io, path::PathBuf};

/// Docket Error Type
///
/// This type contains all the error kinds generated by Docket. Some library
/// functions may return more specific error kinds. All error kinds used within
/// this project should be coercible to this type.
#[derive(Debug)]
pub enum Error {
    /// Source path for documentation set was not a directory
    SourcePathNotADirectory(PathBuf),
    /// A generic IO Error occurred
    Io(io::Error),
    /// Annotated inner error
    Annotated(String, Box<dyn std::error::Error>),
}

impl Error {
    /// Wrap an arbitrary error with a human-readable annotation. The
    /// annotation is prepended when the error is displayed.
    pub fn annotated<S, E>(anno: S, err: E) -> Self
    where
        S: Into<String>,
        E: std::error::Error + 'static,
    {
        Error::Annotated(anno.into(), Box::new(err))
    }
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::SourcePathNotADirectory(path) => {
                write!(
                    f,
                    "Source path is not a directory, or can not be opened: {:?}",
                    path
                )
            }
            Error::Io(io) => io.fmt(f),
            Error::Annotated(anno, e) => {
                write!(f, "{}: {}", anno, e)
            }
        }
    }
}

impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Error::Io(ioerr) => Some(ioerr),
            _ => None,
        }
    }
}

impl From<io::Error> for Error {
    fn from(err: io::Error) -> Self {
        Error::Io(err)
    }
}

/// Docket Result Type
///
/// Shorthand result type for functions returning docket errors.
64 | pub type Result = std::result::Result; 65 | 66 | pub(crate) trait ResultExt> { 67 | fn annotate_err(self, anno: A) -> Result; 68 | } 69 | 70 | impl ResultExt for std::result::Result 71 | where 72 | E: std::error::Error + 'static, 73 | A: Into, 74 | { 75 | fn annotate_err(self, anno: A) -> Result { 76 | self.map_err(|err| Error::annotated(anno, err)) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/highlight.rs: -------------------------------------------------------------------------------- 1 | //! Syntax Highlighting 2 | //! 3 | //! This module provides a trait for highlighinng code blocks and a pair of 4 | //! implementations. The `SyntectHighligher` uses the Syntect crate to render 5 | //! the code to highlighted HTML at build time. The `HighlightJsHighlighter` 6 | //! uses the Highlight JS library at runtime to process codeblocks in the user's 7 | //! browser. 8 | //! 9 | //! If the `syntect-hl` feature is enabled then both highlighters will be 10 | //! available, and syntect perferred. HighlightJS is always avaiable. 11 | 12 | use std::io::Write; 13 | 14 | use log::debug; 15 | use pulldown_cmark::{CodeBlockKind, Event, Tag}; 16 | 17 | pub(crate) trait Highlighter { 18 | /// # Highlight a Code Block 19 | /// 20 | /// Returns a list of the events to emit to the TOC to represent the block. 21 | fn hl_codeblock<'a>(&self, name: Option<&str>, block: &str) -> Vec; 22 | 23 | /// # Write any HTML header required for highlighting on this page. 
24 | fn write_header(&self, out: &mut dyn Write) -> std::io::Result<()>; 25 | } 26 | 27 | pub use js_hl::HighlightJsHighlighter; 28 | #[cfg(feature = "syntect-hl")] 29 | pub use syntect_hl::SyntectHighlighter; 30 | 31 | #[cfg(feature = "syntect-hl")] 32 | mod syntect_hl { 33 | use std::io::Write; 34 | 35 | use pulldown_cmark::Event; 36 | use syntect::{ 37 | self, highlighting::ThemeSet, html::highlighted_html_for_string, parsing::SyntaxSet, 38 | }; 39 | 40 | use super::{to_default_events, Highlighter}; 41 | 42 | pub struct SyntectHighlighter { 43 | ss: SyntaxSet, 44 | ts: ThemeSet, 45 | } 46 | 47 | impl SyntectHighlighter { 48 | /// # Create a New Highlighter 49 | pub fn new() -> Self { 50 | let ss = SyntaxSet::load_defaults_newlines(); 51 | let ts = ThemeSet::load_defaults(); 52 | SyntectHighlighter { ss, ts } 53 | } 54 | } 55 | 56 | impl Highlighter for SyntectHighlighter { 57 | fn hl_codeblock(&self, name: Option<&str>, block: &str) -> Vec { 58 | let syntax = name 59 | .and_then(|name| self.ss.find_syntax_by_token(&name)) 60 | .unwrap_or_else(|| self.ss.find_syntax_plain_text()); 61 | 62 | // debug!("source name: {}, syntax: {:?}", name, syntax.name); 63 | 64 | let theme = &self.ts.themes["InspiredGitHub"]; 65 | let highlighted = highlighted_html_for_string(&block, &self.ss, &syntax, theme); 66 | match highlighted { 67 | Ok(html) => vec![Event::Html(html.into())], 68 | Err(_) => to_default_events(name, &block), 69 | } 70 | } 71 | fn write_header(&self, _out: &mut dyn Write) -> std::io::Result<()> { 72 | Ok(()) 73 | } 74 | } 75 | } 76 | 77 | mod js_hl { 78 | use std::io::Write; 79 | 80 | use pulldown_cmark::Event; 81 | 82 | use super::{to_default_events, Highlighter}; 83 | 84 | pub struct HighlightJsHighlighter; 85 | 86 | impl Highlighter for HighlightJsHighlighter { 87 | fn hl_codeblock(&self, name: Option<&str>, block: &str) -> Vec { 88 | to_default_events(name, &block) 89 | } 90 | 91 | fn write_header(&self, out: &mut dyn Write) -> std::io::Result<()> { 92 | 
const HLJS_VERSION: &str = "10.5.0"; 93 | write!( 94 | out, 95 | r#" 96 | 98 | 99 | "#, 100 | HLJS_VERSION 101 | ) 102 | } 103 | } 104 | } 105 | 106 | fn to_default_events<'a, 'b>(name: Option<&'a str>, block: &'a str) -> Vec> { 107 | let kind = match name { 108 | Some(name) => CodeBlockKind::Fenced(name.to_owned().into()), 109 | None => CodeBlockKind::Indented, 110 | }; 111 | vec![ 112 | Event::Start(Tag::CodeBlock(kind.clone())), 113 | Event::Text(block.to_owned().into()), 114 | Event::End(Tag::CodeBlock(kind)), 115 | ] 116 | } 117 | 118 | /// # Get the Active Highlighter 119 | /// 120 | /// Returns a reference to a shared highlighter. 121 | pub(crate) fn get_hilighter() -> &'static dyn Highlighter { 122 | static GLOBAL_JS_HL: HighlightJsHighlighter = HighlightJsHighlighter; 123 | 124 | #[cfg(feature = "syntect-hl")] 125 | if std::env::var("DOCKET_FORCE_JS_HL").is_err() { 126 | use once_cell::sync::Lazy; 127 | static GLOBAL_SYNTECT_HL: Lazy = 128 | Lazy::new(|| SyntectHighlighter::new()); 129 | debug!("Using syntect for highlighting."); 130 | return &*GLOBAL_SYNTECT_HL; 131 | } 132 | 133 | debug!("Using Javascript highlighter."); 134 | &GLOBAL_JS_HL 135 | } 136 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | //! Markdown to HTML Documentation Generator 2 | 3 | #![deny(missing_docs)] 4 | 5 | mod args; 6 | mod asset; 7 | mod docket; 8 | mod doctree; 9 | mod error; 10 | mod highlight; 11 | mod render; 12 | mod search; 13 | mod toc; 14 | mod utils; 15 | 16 | use std::{ 17 | error::Error, 18 | path::{Path, PathBuf}, 19 | }; 20 | 21 | use crate::docket::Docket; 22 | use error::Result; 23 | use log::{info, warn}; 24 | 25 | /// On Error Behaviour 26 | /// 27 | /// Chooses what should happen if an error happens when running the build. 
28 | #[derive(PartialEq, Copy, Clone)] 29 | #[cfg_attr(not(feature = "watch"), allow(dead_code))] 30 | enum OnError { 31 | Ignore, 32 | Exit, 33 | } 34 | 35 | fn main() { 36 | init_logging(); 37 | 38 | let args = args::from_command_line(); 39 | let source = utils::path_or_default(args.flag_source, "."); 40 | let target = utils::path_or_default(args.flag_target, "build/"); 41 | 42 | handle_err( 43 | if args.flag_watch { 44 | watch_and_build(&target, &source) 45 | } else { 46 | build(&source, &target) 47 | }, 48 | OnError::Exit, 49 | ) 50 | } 51 | 52 | /// Watch and Rebuild 53 | /// 54 | /// This opens a file watcher listening for changes in the source directory. 55 | /// When a file is changed we re-build the documentaiton tree. 56 | fn watch_and_build(target: &PathBuf, source: &PathBuf) -> Result<()> { 57 | #[cfg(feature = "watch")] 58 | { 59 | use crate::error::ResultExt; 60 | use notify::{watcher, RecursiveMode, Watcher}; 61 | use std::{fs, sync::mpsc::channel, time::Duration}; 62 | 63 | // Create the target directory first, to ensure we can unwatch it if 64 | // needed. 65 | fs::create_dir_all(target).annotate_err("Error creating target directory")?; 66 | 67 | let (tx, rx) = channel(); 68 | 69 | let mut watcher = 70 | watcher(tx, Duration::from_secs(2)).annotate_err("could not create file watcher")?; 71 | 72 | watcher 73 | .watch(source, RecursiveMode::Recursive) 74 | .annotate_err("Error watching source directory")?; 75 | // Ignore the erorr here. It most likely means the source and target dir 76 | // don't overlap. 77 | let _ = watcher.unwatch(target); 78 | 79 | handle_err(build(source, target), OnError::Ignore); 80 | println!("Build complete. Watching for changes."); 81 | loop { 82 | match rx.recv() { 83 | Ok(s) => match s { 84 | // Ignore these. They're not debounced. 85 | notify::DebouncedEvent::NoticeWrite(_) 86 | | notify::DebouncedEvent::NoticeRemove(_) => (), 87 | // Write something out if we notice erorrs. 
88 | notify::DebouncedEvent::Error(e, path) => { 89 | warn!("Watcher error at path {path:?}: {e}") 90 | } 91 | // Anything else means a rebuild 92 | _ => { 93 | println!("Rebuilding..."); 94 | handle_err(build(source, target), OnError::Ignore); 95 | println!("Rebuild complete. Watching for changes."); 96 | } 97 | }, 98 | // println!("Rebuilding... {s:?}") 99 | Err(e) => eprintln!("Watcher error: {}", e), 100 | } 101 | } 102 | } 103 | #[cfg(not(feature = "watch"))] 104 | { 105 | eprintln!("Watch not supported. Performing on off build."); 106 | build(source, target) 107 | } 108 | } 109 | 110 | /// Display the contents of the error, if any, and maybe exit 111 | fn handle_err(err: Result<()>, on_err: OnError) { 112 | if let Err(err) = err { 113 | eprintln!("docket: error: {}", err); 114 | if let Some(source) = err.source() { 115 | warn!("Error caused by {}", source); 116 | } 117 | if on_err == OnError::Exit { 118 | std::process::exit(-1); 119 | } 120 | } 121 | } 122 | 123 | /// Run a single pass of documentation generation 124 | /// 125 | /// This does the main job of rendering the documentaiton. Seprated into a 126 | /// different function so we can use the `?` operator, and repeatedly call if 127 | /// we're watching files and re-rendering on change. 128 | fn build(source: &Path, target: &Path) -> Result<()> { 129 | info!("Rendering documenation from {:?} => {:?}", &source, &target); 130 | Docket::open(source)?.render(target) 131 | } 132 | 133 | /// Initialise logging 134 | /// 135 | /// Sets up the env logger using our custom environment varaibles. 
136 | fn init_logging() { 137 | use env_logger::*; 138 | builder() 139 | .target(Target::Stdout) 140 | .parse_env( 141 | Env::new() 142 | .filter("DOCKET_LOG") 143 | .write_style("DOCKET_LOG_STYLE"), 144 | ) 145 | .init(); 146 | } 147 | -------------------------------------------------------------------------------- /src/render.rs: -------------------------------------------------------------------------------- 1 | use log::trace; 2 | 3 | mod layout; 4 | 5 | use crate::{ 6 | asset::Asset, 7 | doctree::{self, DoctreeItem, Frontispiece, Page}, 8 | error::Result, 9 | search::{self, SearchableDocument}, 10 | }; 11 | use std::{ 12 | fs::{self, File}, 13 | io::BufWriter, 14 | path::{Path, PathBuf}, 15 | }; 16 | 17 | use self::layout::Layout; 18 | 19 | /// Render Contex 20 | /// 21 | /// A render context represents a point into which pages can be rendered. It 22 | /// stores global information about the current rendering process. The render 23 | /// context is immutable for the duration of the render. 24 | /// 25 | /// Nested information about the current output directory, the bale being 26 | /// rendred, and the current point in the navigation heirachy is stored in the 27 | /// `RenderState`. 28 | pub struct RenderContext { 29 | /// The output directory to write the state to 30 | path: PathBuf, 31 | /// The overall site name. This is used as the root point in the navigation. 32 | site_name: String, 33 | /// The layout for this render 34 | layout: Option>, 35 | } 36 | 37 | impl RenderContext { 38 | /// Create a new Root Render Context 39 | /// 40 | /// Root render contexts hold global information about the render, and are 41 | /// used as parents for derived cotnexts. 
42 | pub fn new(path: PathBuf, site_name: String) -> Self { 43 | RenderContext { 44 | path, 45 | site_name, 46 | layout: None, 47 | } 48 | } 49 | 50 | fn layout(&self) -> &dyn Layout { 51 | self.layout 52 | .as_deref() 53 | .unwrap_or_else(layout::get_default_layout) 54 | } 55 | } 56 | 57 | enum RenderStateKind<'s, 'b> { 58 | /// A root render state. This state has a direct reference to the render 59 | /// context. 60 | Root(&'s RenderContext), 61 | /// A nested render state. This is made up of a reference to the parent 62 | /// render state, along with the new directory root this context should 63 | /// write into. 64 | Nested(&'s RenderState<'s, 'b>, PathBuf), 65 | } 66 | 67 | impl<'s, 'b> RenderStateKind<'s, 'b> { 68 | /// Create a new root render state 69 | /// 70 | /// This render state represents the root node in the documentaiton tree. It 71 | /// renders to the path in the given render context directly. 72 | fn new_root(context: &'s RenderContext) -> Self { 73 | RenderStateKind::Root(context) 74 | } 75 | 76 | /// Create a new render context based on a parent state 77 | /// 78 | /// The nested render state gets a new path based on the paren'ts location 79 | /// and the given `slug`. 80 | fn with_parent(state: &'s RenderState<'s, 'b>, slug: &str) -> Self { 81 | let mut new_path = PathBuf::from(state.output_path()); 82 | new_path.push(slug); 83 | RenderStateKind::Nested(state, new_path) 84 | } 85 | } 86 | 87 | /// Render State 88 | /// 89 | /// The render state is used by layout to render pages. 90 | pub struct RenderState<'s, 'b> { 91 | /// The kind link 92 | kind: RenderStateKind<'s, 'b>, 93 | /// The bale that is being rendered 94 | bale: &'b Frontispiece, 95 | /// The navigation items at this level 96 | navs: Vec, 97 | } 98 | 99 | impl<'s, 'b> RenderState<'s, 'b> { 100 | /// Create a new render state 101 | /// 102 | /// This render state represents the root node in the documentaiton tree. 
It 103 | /// renders to the path in the given render context directly. 104 | fn new(kind: RenderStateKind<'s, 'b>, bale: &'b Frontispiece, navs: Vec) -> Self { 105 | RenderState { kind, bale, navs } 106 | } 107 | 108 | /// Get the output path for this render state 109 | /// 110 | /// The path is the folder where items should be created when rendering. 111 | fn output_path(&self) -> &Path { 112 | match &self.kind { 113 | RenderStateKind::Root(ctx) => &ctx.path, 114 | RenderStateKind::Nested(_, path) => path, 115 | } 116 | } 117 | 118 | /// The current bale 119 | fn current_bale(&self) -> &Frontispiece { 120 | &self.bale 121 | } 122 | 123 | /// Get the render context 124 | fn ctx(&self) -> &RenderContext { 125 | match self.kind { 126 | RenderStateKind::Root(ctx) => ctx, 127 | RenderStateKind::Nested(parent, _) => parent.ctx(), 128 | } 129 | } 130 | 131 | /// Get this state's parent, if any 132 | fn parent(&self) -> Option<&RenderState> { 133 | if let RenderStateKind::Nested(parent, _) = &self.kind { 134 | Some(parent) 135 | } else { 136 | None 137 | } 138 | } 139 | 140 | /// Get the path to the root for the given page kind 141 | fn path_to_root(&self, page: &PageKind) -> String { 142 | let mut current = self; 143 | let mut path = String::new(); 144 | while let Some(parent) = current.parent() { 145 | path.push_str("../"); 146 | current = parent 147 | } 148 | 149 | if let PageKind::Nested(_) = page { 150 | path.push_str("../"); 151 | } 152 | 153 | path 154 | } 155 | } 156 | 157 | /// An entry in the navigation tree. 158 | struct NavInfo { 159 | /// The title of the item. 160 | pub title: String, 161 | /// The slug to use when constructing a URI. 162 | pub slug: String, 163 | } 164 | 165 | impl NavInfo { 166 | fn new(slug: &str, title: &str) -> Self { 167 | NavInfo { 168 | title: title.to_owned(), 169 | slug: slug.to_owned(), 170 | } 171 | } 172 | } 173 | 174 | /// Kind of page we are rendering. 
For index pages we don't need to do anything 175 | /// to get to the bale root. For nested pages we keep the page's slug. 176 | pub(crate) enum PageKind { 177 | /// An index page 178 | Index, 179 | /// A neseted page with a given slug 180 | Nested(String), 181 | } 182 | 183 | impl PageKind { 184 | /// Get the path to the current bale given this page's kind. 185 | fn path_to_bale(&self) -> &'static str { 186 | match self { 187 | PageKind::Index => "./", 188 | PageKind::Nested(_) => "../", 189 | } 190 | } 191 | } 192 | 193 | enum RenderedItem { 194 | Page(Page), 195 | Nested(String, Box), 196 | } 197 | 198 | impl SearchableDocument for RenderedItem { 199 | fn title(&self) -> &str { 200 | match self { 201 | RenderedItem::Page(p) => p.title(), 202 | RenderedItem::Nested(_, n) => n.title(), 203 | } 204 | } 205 | 206 | fn slug(&self) -> &str { 207 | match self { 208 | RenderedItem::Page(p) => p.slug(), 209 | RenderedItem::Nested(s, _) => &s, 210 | } 211 | } 212 | 213 | fn search_index(&self) -> Option<&search::TermFrequenciesIndex> { 214 | match self { 215 | RenderedItem::Page(p) => p.search_index(), 216 | RenderedItem::Nested(_, n) => n.search_index(), 217 | } 218 | } 219 | } 220 | 221 | impl RenderedItem { 222 | fn page(page: Page) -> Self { 223 | Self::Page(page) 224 | } 225 | 226 | fn nested(slug: &str, inner: RenderedItem) -> Self { 227 | let slug = format!("{}/{}", slug, inner.slug()); 228 | Self::Nested(slug, Box::new(inner)) 229 | } 230 | } 231 | 232 | /// Render a bale to the given directory 233 | /// 234 | /// This walks the tree of documentaiton referred to by the given `bale` and 235 | /// writes the rendered result to the given `ctx`. 
236 | fn render_bale_contents( 237 | state: &RenderState, 238 | assets: Vec, 239 | items: Vec, 240 | ) -> Result> { 241 | trace!( 242 | "rendering bale contents {:?} to {:?}", 243 | state.bale, 244 | state.output_path() 245 | ); 246 | fs::create_dir_all(&state.output_path())?; 247 | 248 | let mut rendered_items = Vec::new(); 249 | 250 | // If we have an index page then redner that 251 | if let Some(page) = state.current_bale().index_page() { 252 | trace!("Bale has an index. Rendering."); 253 | render_page(&state, PageKind::Index, page)?; 254 | // TODO: We don't add the index pages to the search index here, because 255 | // we don't _own_ the index pages. This needs fixing. 256 | } 257 | 258 | // Walk our assets and copy them 259 | for asset in assets { 260 | asset.copy_to(&state.output_path())?; 261 | } 262 | 263 | // Walk the inner items in the bale and render them, in nested contexts if 264 | // required. 265 | for item in items { 266 | match item { 267 | DoctreeItem::Bale(bale) => { 268 | let (bale, assets, items) = bale.break_open()?; 269 | let navs = navs_for_items(&items); 270 | let state = RenderState::new( 271 | RenderStateKind::with_parent(&state, bale.slug()), 272 | &bale, 273 | navs, 274 | ); 275 | rendered_items.extend( 276 | render_bale_contents(&state, assets, items)? 
277 | .into_iter() 278 | .map(|item| RenderedItem::nested(bale.slug(), item)), 279 | ); 280 | } 281 | DoctreeItem::Page(page) => { 282 | render_page(&state, PageKind::Nested(page.slug().to_owned()), &page)?; 283 | rendered_items.push(RenderedItem::page(page)) 284 | } 285 | } 286 | } 287 | 288 | Ok(rendered_items) 289 | } 290 | 291 | fn navs_for_items(items: &[DoctreeItem]) -> Vec { 292 | items 293 | .iter() 294 | .map(|item| match item { 295 | DoctreeItem::Page(page) => NavInfo::new(page.slug(), page.title()), 296 | DoctreeItem::Bale(bale) => { 297 | NavInfo::new(bale.frontispiece().slug(), bale.frontispiece().title()) 298 | } 299 | }) 300 | .collect() 301 | } 302 | 303 | /// Render a Single Page 304 | /// 305 | /// Writes the rendred contents of a given page to a given path. 306 | fn render_page(state: &RenderState, kind: PageKind, page: &doctree::Page) -> Result<()> { 307 | let mut path = PathBuf::from(state.output_path()); 308 | if let PageKind::Nested(slug) = &kind { 309 | path.push(slug); 310 | fs::create_dir_all(&path)?; 311 | }; 312 | 313 | trace!("rendering page {} at {:?}", page.title(), path); 314 | 315 | let output_path = path.join("index.html"); 316 | let file = File::create(&output_path)?; 317 | let mut writer = BufWriter::new(file); 318 | 319 | let layout = state.ctx().layout(); 320 | layout.render(&mut writer, state, kind, page)?; 321 | 322 | Ok(()) 323 | } 324 | 325 | /// Copy any assets used by the layout 326 | fn copy_global_assets(ctx: &RenderContext) -> Result<()> { 327 | fs::create_dir_all(&ctx.path)?; 328 | for asset in ctx.layout().assets() { 329 | asset.copy_to(&ctx.path)?; 330 | } 331 | 332 | Ok(()) 333 | } 334 | 335 | /// Render a Doctree 336 | /// 337 | /// Wwrite the given doctree out to the `target` path using the default render 338 | /// contex. 339 | pub(crate) fn render>( 340 | target: P, 341 | title: String, 342 | doctree_root: doctree::Bale, 343 | ) -> Result<()> { 344 | // The render context. 
This contains the global state used in rendering 345 | let ctx = RenderContext::new(target.as_ref().to_owned(), title); 346 | 347 | // Break opne the root bale and build a root render state. This is the root 348 | // of the render state tree as we walk the document tree. 349 | let (frontispiece, assets, items) = doctree_root.break_open()?; 350 | let navs = navs_for_items(&items); 351 | let state = RenderState::new(RenderStateKind::new_root(&ctx), &frontispiece, navs); 352 | 353 | // Copy any global assets. This allows layouts and other global items to 354 | // include items in the output. 355 | copy_global_assets(&ctx)?; 356 | 357 | // Render the documentation itself. 358 | let docs = render_bale_contents(&state, assets, items)?; 359 | 360 | // Write out a search index for all the rendered documents. 361 | search::write_search_indices(&ctx.path, docs.iter())?; 362 | 363 | Ok(()) 364 | } 365 | -------------------------------------------------------------------------------- /src/render/layout.rs: -------------------------------------------------------------------------------- 1 | //! Layout 2 | //! 3 | //! This module contains the traits needed to implement a layout. Layouts are 4 | //! used to abstract over the exact HTML that is written. 5 | 6 | mod html; 7 | 8 | use super::{PageKind, RenderState}; 9 | use crate::{asset::Asset, doctree, error::Result}; 10 | use html::HtmlLayout; 11 | use std::io::Write; 12 | 13 | /// Layout Trait 14 | /// 15 | /// Layouts are responsible for writing out the contents of pages to files. A 16 | /// layout recieves a reference to the current render state, and information 17 | /// about the current page. 18 | pub(crate) trait Layout { 19 | /// Render a Page 20 | /// 21 | /// Layout rendering should write a representaiton of the `page` to the 22 | /// given `writer`. Context is provided in the `state` and `kind` make 23 | /// redenring of navigation information possible. 
24 | fn render( 25 | &self, 26 | writer: &mut dyn Write, 27 | state: &RenderState, 28 | kind: PageKind, 29 | page: &doctree::Page, 30 | ) -> Result<()>; 31 | 32 | /// Get the Layout's Assets 33 | /// 34 | /// Returns a list of items to copy to the site root if this layout is used 35 | /// to render things. 36 | fn assets(&self) -> &[Asset]; 37 | } 38 | 39 | // Get the dfault layout 40 | pub(crate) fn get_default_layout<'a>() -> &'a dyn Layout { 41 | const DEFAULT: HtmlLayout = HtmlLayout; 42 | &DEFAULT 43 | } 44 | -------------------------------------------------------------------------------- /src/render/layout/html.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | asset::Asset, 3 | doctree::Page, 4 | error::Result, 5 | render::{NavInfo, PageKind, RenderState}, 6 | toc::{Nodes, Toc, TocElement}, 7 | }; 8 | use pulldown_cmark::HeadingLevel; 9 | use std::{fmt, io::Write}; 10 | 11 | use super::Layout; 12 | 13 | struct Breadcrumbs<'a>(&'a RenderState<'a, 'a>, &'a str); 14 | 15 | impl<'a> fmt::Display for Breadcrumbs<'a> { 16 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 17 | let mut current = Some(self.0); 18 | let mut stack = Vec::new(); 19 | let mut path = String::from(self.1); 20 | 21 | while let Some(state) = current { 22 | stack.push((path.clone(), state)); 23 | path.push_str("../"); 24 | current = state.parent(); 25 | } 26 | write!(f, "") 44 | } 45 | } 46 | 47 | struct Content<'a>(&'a Toc); 48 | 49 | impl<'a> fmt::Display for Content<'a> { 50 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 51 | for element in self.0.walk_elements() { 52 | match element { 53 | TocElement::Html(htm) => htm.fmt(f)?, 54 | TocElement::TocReference => render_toc_to(f, self.0.nodes(), HeadingLevel::H3)?, 55 | // We only need to write the heading here, the contents will be 56 | // handled by the recurse from the walker. 
57 | TocElement::Node(nested) => write!( 58 | f, 59 | "<{level} id='{slug}'>{heading}", 60 | level = &nested.heading.level, 61 | slug = &nested.heading.slug, 62 | heading = &nested.heading.contents, 63 | )?, 64 | } 65 | } 66 | Ok(()) 67 | } 68 | } 69 | 70 | struct RenderedToc<'a>(&'a Toc, HeadingLevel); 71 | 72 | impl<'a> fmt::Display for RenderedToc<'a> { 73 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 74 | if self.0.nodes().count() == 1 { 75 | let inner_node = self.0.nodes().next().unwrap(); 76 | if inner_node.nodes().count() > 0 { 77 | render_toc_to(f, inner_node.nodes(), self.1)?; 78 | 79 | return Ok(()); 80 | } 81 | } 82 | render_toc_to(f, self.0.nodes(), self.1) 83 | } 84 | } 85 | 86 | /// Render the TOC 87 | /// 88 | /// This walks the list of `TocElement`s and renders them as a list of links 89 | /// using their clean text for the link body. 90 | fn render_toc_to(f: &mut fmt::Formatter<'_>, nodes: Nodes, limit: HeadingLevel) -> fmt::Result { 91 | write!(f, "
        ")?; 92 | for node in nodes { 93 | if node.heading.level <= limit { 94 | write!( 95 | f, 96 | "
      • {1}", 97 | node.heading.slug, node.heading.contents 98 | )?; 99 | 100 | render_toc_to(f, node.nodes(), limit)?; 101 | } 102 | } 103 | write!(f, "
      ") 104 | } 105 | 106 | /// Renderable struct 107 | struct Navs<'a>(&'a [NavInfo], &'a str); 108 | 109 | impl<'a> fmt::Display for Navs<'a> { 110 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 111 | if self.0.len() > 0 { 112 | write!(f, "
        ")?; 113 | for nav in self.0.iter() { 114 | write!( 115 | f, 116 | "
      • {title}", 117 | title = nav.title, 118 | prefix = self.1, 119 | slug = nav.slug 120 | )?; 121 | } 122 | write!(f, "
      ")?; 123 | } 124 | Ok(()) 125 | } 126 | } 127 | 128 | fn get_footer<'a>(state: &'a RenderState) -> &'a str { 129 | state 130 | .current_bale() 131 | .footer() 132 | .unwrap_or("

      Rendered by Docket

      ") 133 | } 134 | 135 | /// The HTML Layout 136 | /// 137 | /// This struct implements the `Layout` trait to allow rendering pages. 138 | pub(super) struct HtmlLayout; 139 | 140 | impl Layout for HtmlLayout { 141 | fn render( 142 | &self, 143 | writer: &mut dyn Write, 144 | state: &RenderState, 145 | kind: PageKind, 146 | page: &Page, 147 | ) -> Result<()> { 148 | let nav_prefix = kind.path_to_bale(); 149 | write!( 150 | writer, 151 | r##" 152 | 153 | 154 | {site_name} | {page_title} 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 |
      166 |
      167 | 168 |
      169 |
      170 |
      171 |
      172 | 173 | 174 | 181 | 182 | 186 |
      187 |
      188 | {content} 189 |
      190 |
      191 |
      192 |
      {footer}
      193 | 194 | "##, 195 | site_name = state.ctx().site_name, 196 | root = state.path_to_root(&kind), 197 | breadcrumbs = Breadcrumbs(state, nav_prefix), 198 | page_title = page.title(), 199 | bale_title = state.current_bale().title(), 200 | nav_prefix = nav_prefix, 201 | navs = Navs(&state.navs, nav_prefix), 202 | toc = RenderedToc(page.content(), HeadingLevel::H3), 203 | content = Content(page.content()), 204 | footer = get_footer(state) 205 | )?; 206 | Ok(()) 207 | } 208 | 209 | fn assets(&self) -> &[Asset] { 210 | static ASSETS: [Asset; 4] = [ 211 | Asset::internal("style.css", include_str!("../../../assets/style.css")), 212 | Asset::internal("search.js", include_str!("../../../assets/search.js")), 213 | Asset::internal("dark.js", include_str!("../../../assets/dark.js")), 214 | Asset::internal( 215 | "feather-sprite.svg", 216 | include_str!("../../../assets/feather-sprite.svg"), 217 | ), 218 | ]; 219 | &ASSETS[..] 220 | } 221 | } 222 | -------------------------------------------------------------------------------- /src/search.rs: -------------------------------------------------------------------------------- 1 | //! Search Index Builder 2 | //! 3 | //! This module defines a builder API to produce term frequency indexes. These 4 | //! indexes are serialisable. Once built search indexes are written out to the 5 | //! root of the site and loaded by javascript to provide an interactive search. 6 | 7 | use serde::Serialize; 8 | use std::collections::HashMap; 9 | use std::fs::File; 10 | use std::io; 11 | use std::path::Path; 12 | 13 | /// Builder struct for search indices 14 | /// 15 | /// Keeps track of term counts as they are added. Once all terms have 16 | /// been observed the builder can be transformed into a `TermFrequenciesIndex`. 
#[derive(Default, Debug)]
pub(crate) struct TermFrequenciesBuilder {
    // Total number of terms observed so far, across all `add_terms` calls.
    term_count: u32,
    // Raw occurrence count for each lower-cased term.
    terms: HashMap<String, u32>,
}

impl TermFrequenciesBuilder {
    /// Add the terms from `text` to the index being built.
    ///
    /// Splits on whitespace, ASCII punctuation, and the `<`/`>` characters
    /// (so embedded markup does not leak into the index), lower-cases each
    /// term, and bumps its occurrence count. Returns `&mut Self` so calls
    /// can be chained.
    pub fn add_terms(&mut self, text: &str) -> &mut Self {
        for term in text.split(|c: char| {
            c == '>' || c == '<' || c.is_whitespace() || c.is_ascii_punctuation()
        }) {
            let term = term.trim();
            if !term.is_empty() {
                self.term_count += 1;
                *self.terms.entry(term.to_lowercase()).or_default() += 1;
            }
        }
        self
    }

    /// Finalise the Search Index
    ///
    /// Convert the raw term counts into a term frequencies index, where each
    /// term maps to `count / total_terms`.
    pub fn finalise(self) -> TermFrequenciesIndex {
        let total: f64 = self.term_count.into();
        TermFrequenciesIndex(
            self.terms
                .into_iter()
                .map(|(term, count)| (term, f64::from(count) / total))
                .collect(),
        )
    }
}

/// Term → relative-frequency map produced by `TermFrequenciesBuilder`.
#[derive(Debug)]
pub struct TermFrequenciesIndex(HashMap<String, f64>);

impl TermFrequenciesIndex {
    /// Unpack the inner frequencies map from this index type
    #[allow(dead_code)]
    pub fn into_raw(self) -> HashMap<String, f64> {
        self.0
    }

    /// Obtain a reference to the frequencies map for this index
    pub fn as_raw(&self) -> &HashMap<String, f64> {
        &self.0
    }

    /// Iterate over the terms in this index
    #[allow(dead_code)]
    pub fn iter_terms(&self) -> impl Iterator<Item = &String> {
        self.0.keys()
    }

    /// Iterate over the term frequencies in this index
    #[allow(dead_code)]
    pub fn iter_frequencies(&self) -> impl Iterator<Item = (&String, &f64)> {
        self.0.iter()
    }
}

// Searchable Document Trait
//
// This trait abstracts over elements that can appear within a search index.
83 | pub trait SearchableDocument { 84 | /// Document Title 85 | fn title(&self) -> &str; 86 | 87 | /// Document Slug 88 | fn slug(&self) -> &str; 89 | 90 | /// Search index for this document, if any 91 | fn search_index(&self) -> Option<&TermFrequenciesIndex>; 92 | } 93 | 94 | /// A Page in the Search Index 95 | /// 96 | /// This POD struct is used to serialise the search index. It is consumed by 97 | /// the `search.js` file as `search_index.json`. 98 | #[derive(Serialize)] 99 | struct SearchIndexEntry<'a> { 100 | /// The page's title. 101 | pub title: &'a str, 102 | /// The URL `slug` to use when linking to the page. 103 | pub slug: &'a str, 104 | /// The search term weights for this page. 105 | pub terms: &'a HashMap, 106 | } 107 | 108 | /// Write the built search indices to the output directory 109 | pub(crate) fn write_search_indices<'a, I, D>(output_dir: &Path, pages: I) -> Result<(), io::Error> 110 | where 111 | I: Iterator, 112 | D: SearchableDocument + 'a, 113 | { 114 | let search_index_path = output_dir.join("search_index.json"); 115 | let index_file = File::create(&search_index_path)?; 116 | let index: Vec<_> = pages 117 | .flat_map(|page| { 118 | page.search_index().map(|index| SearchIndexEntry { 119 | title: page.title(), 120 | slug: page.slug(), 121 | terms: index.as_raw(), 122 | }) 123 | }) 124 | .collect(); 125 | serde_json::to_writer(index_file, &index)?; 126 | 127 | Ok(()) 128 | } 129 | 130 | #[cfg(test)] 131 | pub mod test { 132 | use super::TermFrequenciesBuilder; 133 | 134 | #[test] 135 | pub fn empty_search_indx() { 136 | let builder = TermFrequenciesBuilder::default(); 137 | let index = builder.finalise(); 138 | 139 | assert_eq!(0, index.as_raw().len()); 140 | assert_eq!(0, index.iter_terms().count()); 141 | assert_eq!(0, index.iter_terms().count()); 142 | assert_eq!(0, index.into_raw().len()); 143 | } 144 | 145 | #[test] 146 | pub fn index_with_terms() { 147 | let mut builder = TermFrequenciesBuilder::default(); 148 | builder.add_terms("a 
test a string"); 149 | let index = builder.finalise(); 150 | 151 | assert_eq!(3, index.iter_terms().count()); 152 | let mut terms: Vec<_> = index.iter_terms().cloned().collect(); 153 | let index = index.into_raw(); 154 | terms.sort(); 155 | assert_eq!(vec!["a", "string", "test"], terms); 156 | assert!(index.get("a").unwrap() > index.get("test").unwrap()); 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /src/toc.rs: -------------------------------------------------------------------------------- 1 | //! Tree of Contents 2 | //! 3 | //! This module defines a tree structre over the events of `pulldown_cmark`. The 4 | //! tree can be rendered into HTML with `pulldown`, or quieried for the document 5 | //! layout in order to produce navigation elements. 6 | 7 | use std::{borrow::Borrow, iter::Peekable}; 8 | 9 | use log::error; 10 | use pulldown_cmark::*; 11 | 12 | use crate::{ 13 | highlight, 14 | search::{TermFrequenciesBuilder, TermFrequenciesIndex}, 15 | utils, 16 | }; 17 | 18 | /// # A single ement in the TOC 19 | /// 20 | /// Represents either a pre-rendered HTML blob, a reference to insert the full 21 | /// TOC, or a nested toc node. 22 | #[derive(Debug, PartialEq)] 23 | pub(crate) enum TocElement { 24 | /// Raw Pulldown events 25 | Html(String), 26 | 27 | /// TOC references 28 | TocReference, 29 | 30 | /// A node in the tree 31 | Node(TocNode), 32 | } 33 | 34 | /// # A heading 35 | /// 36 | /// Headings from the raw markdown document with extra metadata to allow the TOC 37 | /// to be rendered. 38 | #[derive(Debug, PartialEq)] 39 | pub(crate) struct Heading { 40 | /// The header level. H1 .. H6 41 | pub level: HeadingLevel, 42 | 43 | /// The raw contents for this heading. 44 | pub contents: String, 45 | 46 | /// The fragment identifier, or slug, to use for this heading. 47 | pub slug: String, 48 | } 49 | 50 | /// # TOC Node 51 | /// 52 | /// A node in the TOC tree. 
A node has a heading that introduced the node and a 53 | /// list of children. 54 | #[derive(Debug, PartialEq)] 55 | pub(crate) struct TocNode { 56 | /// The heading at this node 57 | pub heading: Heading, 58 | 59 | /// The TOC contents for this node. 60 | pub contents: Vec, 61 | } 62 | 63 | impl TocNode { 64 | pub fn nodes(&self) -> Nodes { 65 | Nodes(&self.contents, 0) 66 | } 67 | } 68 | 69 | /// Toc Element Iterator 70 | /// 71 | /// This iterator performs a depth-first walk of the element tree 72 | pub(crate) struct Elements<'a>(Vec<&'a TocElement>); 73 | 74 | impl<'a> Elements<'a> { 75 | pub fn new(elements: &'a [TocElement]) -> Self { 76 | Elements(Vec::from_iter(elements.iter().rev())) 77 | } 78 | } 79 | 80 | impl<'a> Iterator for Elements<'a> { 81 | type Item = &'a TocElement; 82 | 83 | fn next(&mut self) -> Option { 84 | let next = self.0.pop(); 85 | if let Some(TocElement::Node(node)) = next { 86 | self.0.extend(node.contents.iter().rev()) 87 | } 88 | 89 | next 90 | } 91 | } 92 | 93 | /// Toc Node Iterator 94 | /// 95 | /// Enumerates all the nodes within a given set of elements. 96 | pub(crate) struct Nodes<'a>(&'a [TocElement], usize); 97 | 98 | impl<'a> Iterator for Nodes<'a> { 99 | type Item = &'a TocNode; 100 | 101 | fn next(&mut self) -> Option { 102 | while let Some(element) = self.0.get(self.1) { 103 | self.1 = self.1 + 1; 104 | if let TocElement::Node(node) = element { 105 | return Some(&node); 106 | } 107 | } 108 | 109 | None 110 | } 111 | } 112 | 113 | /// # Tree of Contents 114 | /// 115 | /// The tree of contents is the basic unit of pages within the document tree. A 116 | /// page contains a single Tree of Contents. The tree is a list of elements 117 | /// which mirror the nesting of the document's heading structure. 118 | /// 119 | /// A tree can be queried for information about the document's outline, primary 120 | /// heading, or full contnet. 
The layout module uses the public API of the `Toc` 121 | /// to render out page's contents, internal navigation, and title information. 122 | #[derive(Debug)] 123 | pub(crate) struct Toc(Vec, TermFrequenciesIndex); 124 | 125 | impl Toc { 126 | /// # Parse a Tree of Contents 127 | /// 128 | /// Given a markdown string parse it and return a vector containing the 129 | /// top-level elements in the document's tree. 130 | pub fn new(markdown: &str) -> Self { 131 | let parser = Parser::new_ext(markdown, Options::all()); 132 | let mut index_builder = TermFrequenciesBuilder::default(); 133 | let parser = build_search_index(&mut index_builder, parser); 134 | let parser = hl_codeblocks(parser); 135 | let events = parse_toc_events(parser); 136 | Toc(events, index_builder.finalise()) 137 | } 138 | 139 | /// # Primary Heading 140 | /// 141 | /// Get the first heading within the tree. If the tree contains no headings 142 | /// then `None` is returned. 143 | pub fn primary_heading(&self) -> Option<&String> { 144 | self.0.iter().find_map(|element| match element { 145 | TocElement::Node(node) => Some(&node.heading.contents), 146 | _ => None, 147 | }) 148 | } 149 | 150 | /// # Get the Nodes Iterator 151 | /// 152 | /// Returns an iterator over the nodes within the root of the tree. 153 | pub fn nodes(&self) -> Nodes { 154 | Nodes(&self.0, 0) 155 | } 156 | 157 | /// # Depth-frist walk of the elements of the tree 158 | pub fn walk_elements(&self) -> Elements { 159 | Elements::new(&self.0) 160 | } 161 | 162 | /// # Unwrap the Inner Elements 163 | #[cfg(test)] 164 | fn into_inner(self) -> Vec { 165 | self.0 166 | } 167 | 168 | /// # Get the Page's Search Index 169 | /// 170 | /// The search index contains the raw term frequencies for the document's 171 | /// content. 
172 | pub fn search_index(&self) -> &TermFrequenciesIndex { 173 | &self.1 174 | } 175 | } 176 | 177 | fn hl_codeblocks<'a, I>(parser: I) -> impl Iterator> 178 | where 179 | I: Iterator>, 180 | { 181 | let mut state: Option = None; 182 | let hl = highlight::get_hilighter(); 183 | parser.flat_map(move |event| { 184 | if let Some(mut hl_state) = state.take() { 185 | match event { 186 | Event::Text(txt) => { 187 | hl_state.push_str(txt.as_ref()); 188 | state = Some(hl_state); 189 | vec![] 190 | } 191 | Event::End(Tag::CodeBlock(kind)) => { 192 | state = None; 193 | hl.hl_codeblock( 194 | match &kind { 195 | CodeBlockKind::Indented => None, 196 | CodeBlockKind::Fenced(name) => Some(name.as_ref()), 197 | }, 198 | &hl_state, 199 | ) 200 | } 201 | _ => { 202 | error!("Unexpected item in codeblock: {:?}", event); 203 | vec![event] 204 | } 205 | } 206 | } else { 207 | match event { 208 | Event::Start(Tag::CodeBlock(_)) => { 209 | state = Some(String::new()); 210 | vec![] 211 | } 212 | _ => vec![event], 213 | } 214 | } 215 | }) 216 | } 217 | 218 | fn build_search_index<'a, 'p, I>( 219 | index_builder: &'p mut TermFrequenciesBuilder, 220 | parser: I, 221 | ) -> impl Iterator> + 'p 222 | where 223 | I: Iterator> + 'p, 224 | { 225 | parser.inspect(|event| match event { 226 | Event::Code(code) => { 227 | index_builder.add_terms(&code); 228 | } 229 | Event::Text(txt) => { 230 | index_builder.add_terms(&txt); 231 | } 232 | Event::Html(htm) => { 233 | index_builder.add_terms(&htm); 234 | } 235 | _ => (), 236 | }) 237 | } 238 | 239 | /// Get the inner text from a series of events. 
used to create a heading name 240 | /// from a series of events, or to find the text that should be 241 | fn events_to_plain<'a, I, E>(events: I) -> String 242 | where 243 | I: Iterator, 244 | E: Borrow>, 245 | { 246 | let mut text = String::new(); 247 | 248 | for ev in events { 249 | match ev.borrow() { 250 | Event::Text(txt) => text.push_str(txt.as_ref()), 251 | Event::Code(code) => text.push_str(code.as_ref()), 252 | Event::Html(htm) => text.push_str(htm.as_ref()), 253 | _ => (), 254 | } 255 | } 256 | 257 | text 258 | } 259 | 260 | /// # Drain events to HTML 261 | /// 262 | /// If the events vector contains any buffered events then return the rendred 263 | /// HTML. If the buffer is empty then return `None`. This utility is used during 264 | /// the TOC walk to ensure we always render HTML if we have events buffered, and 265 | /// that we don't include spurious HTML nodes when there are no buffered events. 266 | fn drain_events_to_html(events: &mut Vec) -> Option { 267 | if events.is_empty() { 268 | None 269 | } else { 270 | let mut result = String::new(); 271 | pulldown_cmark::html::push_html(&mut result, events.drain(..)); 272 | Some(result) 273 | } 274 | } 275 | 276 | /// Parse a TOC tree from the headers in the markdown document 277 | fn parse_toc_events<'a, I>(events: I) -> Vec 278 | where 279 | I: Iterator>, 280 | { 281 | parse_toc_at_level(None, &mut events.peekable()) 282 | } 283 | 284 | /// Parse the toc tree at a given header level. 
285 | fn parse_toc_at_level<'a, I>( 286 | level: Option, 287 | events: &mut Peekable, 288 | ) -> Vec 289 | where 290 | I: Iterator>, 291 | { 292 | let mut buffered = Vec::new(); 293 | let mut elements = Vec::new(); 294 | 295 | while let Some(event) = events.next_if(|event| is_below(level, event)) { 296 | match event { 297 | // If we see a heading tag then start building a heading 298 | Event::Start(Tag::Heading(..)) => { 299 | if let Some(element) = drain_events_to_html(&mut buffered) { 300 | elements.push(TocElement::Html(element)); 301 | } 302 | } 303 | // If we see a heading end tag then recurse to parse any 304 | // elements owned by that heading. 305 | Event::End(Tag::Heading(level, frag, _class)) => { 306 | // Not we didn't push the opening event _and_ we ignore the 307 | // closing one here too. This means we will only render the 308 | // _contents_ of the header, not the opening and closing tags. 309 | let slug = frag 310 | .map(|s| s.to_owned()) 311 | .unwrap_or_else(|| utils::slugify(&events_to_plain(buffered.iter()))); 312 | elements.push(TocElement::Node(TocNode { 313 | heading: Heading { 314 | level, 315 | contents: drain_events_to_html(&mut buffered).unwrap_or(String::new()), 316 | slug, 317 | }, 318 | contents: parse_toc_at_level(Some(level), events), 319 | })) 320 | } 321 | // If we see a closing paragraph then check if we're looking at 322 | // a `[TOC]` reference. If we are then replace the paragraph 323 | // with a marker. 324 | Event::End(Tag::Paragraph) => { 325 | if in_toc(&buffered) { 326 | buffered.truncate(buffered.len() - 4); 327 | if let Some(html) = drain_events_to_html(&mut buffered) { 328 | elements.push(TocElement::Html(html)); 329 | } 330 | elements.push(TocElement::TocReference); 331 | } else { 332 | buffered.push(Event::End(Tag::Paragraph)); 333 | } 334 | } 335 | // A normal event 336 | ev => buffered.push(ev), 337 | } 338 | } 339 | 340 | // If we have any events left then make sure to append them here. 
341 | if let Some(element) = drain_events_to_html(&mut buffered) { 342 | elements.push(TocElement::Html(element)); 343 | } 344 | 345 | elements 346 | } 347 | 348 | /// Check if we have just seen a `

      `, `[`, `TOC`, and `]` 349 | fn in_toc(current: &[Event]) -> bool { 350 | let idx = current.len() - 1; 351 | if let Some(Event::Text(ref toc)) = current.get(idx) { 352 | if toc.as_ref() != "]" { 353 | return false; 354 | } 355 | } else { 356 | return false; 357 | } 358 | if let Some(Event::Text(ref toc)) = current.get(idx - 1) { 359 | if toc.as_ref() != "TOC" { 360 | return false; 361 | } 362 | } else { 363 | return false; 364 | } 365 | if let Some(Event::Text(ref toc)) = current.get(idx - 2) { 366 | if toc.as_ref() != "[" { 367 | return false; 368 | } 369 | } else { 370 | return false; 371 | } 372 | if let Some(Event::Start(Tag::Paragraph)) = current.get(idx - 3) { 373 | true 374 | } else { 375 | false 376 | } 377 | } 378 | 379 | // Check if the current event should live below the given heading level. 380 | fn is_below(level: Option, event: &Event) -> bool { 381 | level 382 | .map(|level| match event { 383 | Event::Start(Tag::Heading(ref next_level, ..)) => *next_level > level, 384 | _ => true, 385 | }) 386 | .unwrap_or(true) 387 | } 388 | 389 | #[cfg(test)] 390 | mod test { 391 | use super::*; 392 | fn h(level: HeadingLevel, contents: &str) -> Heading { 393 | let slug = utils::slugify(&contents); 394 | hslug(level, contents, &slug) 395 | } 396 | 397 | fn hslug(level: HeadingLevel, contents: &str, slug: &str) -> Heading { 398 | Heading { 399 | level, 400 | contents: contents.into(), 401 | slug: slug.into(), 402 | } 403 | } 404 | 405 | fn parse_toc(s: &str) -> Vec { 406 | Toc::new(s).into_inner() 407 | } 408 | 409 | #[test] 410 | fn parse_example_doc_toc() { 411 | let parser = Parser::new( 412 | " 413 | # Heading 1.1 414 | 415 | para one 416 | 417 | ## Heading 2.1 418 | 419 | para two 420 | 421 | para three 422 | 423 | ### Heading 3.1 424 | 425 | ```code 426 | block four 427 | ``` 428 | 429 | ## Heading 2.2 430 | 431 | 432 | 433 | # Heading 1.2 434 | 435 | > last bit", 436 | ); 437 | let toc = parse_toc_events(parser); 438 | 439 | assert_eq!(2, toc.len()); 
440 | } 441 | 442 | #[test] 443 | fn parse_with_no_headings() { 444 | let doc = "hello world"; 445 | let parser = Parser::new(doc); 446 | 447 | let toc = parse_toc_events(parser); 448 | 449 | assert_eq!(vec![TocElement::Html("

      hello world

      \n".into())], toc); 450 | } 451 | 452 | #[test] 453 | fn parse_with_single_heading() { 454 | let doc = "# I am an H1"; 455 | 456 | let toc = parse_toc(doc); 457 | 458 | assert_eq!( 459 | vec![TocElement::Node(TocNode { 460 | heading: h(HeadingLevel::H1, "I am an H1"), 461 | contents: Vec::new() 462 | })], 463 | toc 464 | ); 465 | } 466 | 467 | #[test] 468 | fn parse_heading_with_nested_formatting() { 469 | let doc = "# I am `an` **H1**"; 470 | 471 | let toc = parse_toc(doc); 472 | 473 | assert_eq!( 474 | vec![TocElement::Node(TocNode { 475 | heading: hslug( 476 | HeadingLevel::H1, 477 | "I am an H1", 478 | "I-am-an-H1" 479 | ), 480 | contents: Vec::new() 481 | })], 482 | toc 483 | ); 484 | } 485 | 486 | #[test] 487 | fn parse_with_single_toc_reference() { 488 | let doc = "[TOC]"; 489 | 490 | let toc = parse_toc(&doc); 491 | 492 | assert_eq!(vec![TocElement::TocReference,], toc); 493 | } 494 | 495 | #[test] 496 | fn parse_with_nested_headings() { 497 | let doc = r#" 498 | # Heading 1.1 499 | 500 | ## Heading 2.1 501 | 502 | ### Heading 3.1 503 | 504 | ## Heading 2.2 505 | 506 | # Heading 1.2 507 | "#; 508 | 509 | let toc = parse_toc(doc); 510 | 511 | assert_eq!( 512 | vec![ 513 | TocElement::Node(TocNode { 514 | heading: h(HeadingLevel::H1, "Heading 1.1"), 515 | contents: vec![ 516 | TocElement::Node(TocNode { 517 | heading: h(HeadingLevel::H2, "Heading 2.1"), 518 | contents: vec![TocElement::Node(TocNode { 519 | heading: h(HeadingLevel::H3, "Heading 3.1"), 520 | contents: Vec::new() 521 | })], 522 | }), 523 | TocElement::Node(TocNode { 524 | heading: h(HeadingLevel::H2, "Heading 2.2"), 525 | contents: Vec::new() 526 | }), 527 | ] 528 | }), 529 | TocElement::Node(TocNode { 530 | heading: h(HeadingLevel::H1, "Heading 1.2"), 531 | contents: Vec::new() 532 | }), 533 | ], 534 | toc 535 | ) 536 | } 537 | } 538 | -------------------------------------------------------------------------------- /src/utils.rs: 
use std::path::{Path, PathBuf};

/// Convert a string of arbitrary characters to a form suitable for use
/// as an HTML identifier or file name.
///
/// A leading all-numeric ordering prefix ending in `-` (e.g. the `01-` in
/// `01-intro`) is dropped, then whitespace and URL-hostile punctuation are
/// replaced with `-`.
pub(crate) fn slugify<S: AsRef<str>>(input: S) -> String {
    let input = input.as_ref();
    // Strip a numeric ordering prefix such as `01-`, if present.
    let to_map = match input.find('-') {
        Some(offset) => {
            let (prefix, suffix) = input.split_at(offset + 1);
            if prefix[..offset].chars().all(|c| c.is_numeric()) {
                suffix
            } else {
                input
            }
        }
        None => input,
    };

    to_map
        .chars()
        .map(|c| match c {
            c if c.is_whitespace() => '-',
            '?' | '&' | '=' | '\\' | '/' | '"' => '-',
            _ => c,
        })
        .collect()
}

/// Slugify a Path
///
/// Slugifies the path's file stem, falling back to the whole path when no
/// stem is available.
pub(crate) fn slugify_path<P: AsRef<Path>>(input: P) -> String {
    slugify(match input.as_ref().file_stem() {
        Some(stem) => stem.to_string_lossy(),
        _ => input.as_ref().to_string_lossy(),
    })
}

/// Get a Normalised File Extension, if available
///
/// Returns the downcased representation of the file extension if one is
/// available. If the path has no extension then `None` is returned.
pub(crate) fn normalised_path_ext<P: AsRef<Path>>(path: P) -> Option<String> {
    path.as_ref()
        .extension()
        .map(|ext| ext.to_string_lossy().to_ascii_lowercase())
}

/// Get the Normalised File Stem from the Path
///
/// Returns the file's stem, that is the part before the extension, downcased
/// and converted into a standard string.
pub(crate) fn normalised_stem<P: AsRef<Path>>(path: P) -> Option<String> {
    path.as_ref()
        .file_stem()
        .map(|stem| stem.to_string_lossy().to_ascii_lowercase())
}

// Path or Default
//
// Try to convert a command line argument to a path. Falling back to the
// default if none is provided.
pub(crate) fn path_or_default(maybe_path: Option<String>, default: &str) -> PathBuf {
    // `PathBuf::from(default)` is only built when no argument was supplied.
    maybe_path
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from(default))
}

/// Prettify a Directory Name
///
/// Trims the leading and trailing parts of the given directory name. Drops
/// leading numeric characters and the `-`/`_` separator that follows them.
pub(crate) fn prettify_dir<P: AsRef<Path>>(path: P) -> Option<String> {
    path.as_ref().file_name().map(|name| {
        name.to_string_lossy()
            .trim_start_matches(char::is_numeric)
            .trim_start_matches(['-', '_'])
            .trim()
            .to_owned()
    })
}

#[cfg(test)]
mod test {

    use super::*;

    #[test]
    fn slugify_simple_strings() {
        assert_eq!("a", slugify("a"));
        assert_eq!("hello", slugify("hello"));
        assert_eq!("1000", slugify("1000"));
    }

    #[test]
    fn slugify_replaces_space_characters() {
        assert_eq!("hello-world", slugify("hello world"));
        assert_eq!("foo-bar-baz", slugify("foo\tbar baz"));
    }

    #[test]
    fn slugify_replaces_url_punctuation() {
        assert_eq!("foo-bar", slugify("foo&bar"));
        assert_eq!("foo-bar", slugify("foo\"bar"));
        assert_eq!("foo-bar", slugify("foo?bar"));
        assert_eq!("foo-bar", slugify("foo=bar"));
    }

    #[test]
    fn slugify_replaces_path_components() {
        assert_eq!("yop-yop-yop", slugify("yop/yop\\yop"));
    }

    #[test]
    fn slugify_allows_unicode() {
        assert_eq!("€$§±*", slugify("€$§±*"));
        assert_eq!("ü¬é∆∂", slugify("ü¬é∆∂"));
    }

    #[test]
    fn slugify_removes_leading_numbers() {
        assert_eq!("hello", slugify("01-hello"));
        // Prefix containing a letter is not an ordering prefix.
        assert_eq!("01a-foo", slugify("01a-foo"));
    }

    #[test]
    fn slugify_path_removes_extension() {
        assert_eq!("about", slugify_path(Path::new("01-about.md")));
        assert_eq!("foo", slugify_path(Path::new("foo.html")));
        assert_eq!("bar-baz", slugify_path(Path::new("10-bar-baz")));
        assert_eq!("human2.0", slugify_path(Path::new("human2.0.html")));
    }

    #[test]
    fn slugify_path_removes_leading_path() {
        assert_eq!("world", slugify_path(Path::new("hello/world.gif")));
        assert_eq!("e", slugify_path(Path::new("a/b/c/cd/11-e.10")));
    }

    #[test]
    fn normalised_path_ext_no_ext() {
        assert_eq!(None, normalised_path_ext("/hello/world"));
        assert_eq!(None, normalised_path_ext("bar::"));
        assert_eq!(None, normalised_path_ext("../some_path/"));
    }

    #[test]
    fn normalised_path_ext_returns_downcased_extension() {
        assert_eq!(Some(String::from("md")), normalised_path_ext("README.md"));
        assert_eq!(Some(String::from("rs")), normalised_path_ext("TEST.RS"));
    }

    #[test]
    fn normalised_stem_no_stem() {
        assert_eq!(None, normalised_stem("../"));
    }

    #[test]
    fn normalised_stem_downcases_stem() {
        assert_eq!(Some(String::from("readme")), normalised_stem("README.md"));
        assert_eq!(Some(String::from("test")), normalised_stem("TEST.RS"));
        assert_eq!(Some(String::from("index")), normalised_stem("index.html"));
        assert_eq!(
            Some(String::from("world")),
            normalised_stem("../hello/../world/")
        );
        assert_eq!(
            Some(String::from(".config")),
            normalised_stem("/hello/.config.world/")
        );
    }

    #[test]
    fn prettified_dirnames() {
        assert_eq!(Some("Hello World"), prettify_dir("Hello World").as_deref());
        assert_eq!(Some("Test"), prettify_dir("/this/is/a/Test").as_deref());
        assert_eq!(Some("Test"), prettify_dir("../another/Test/.").as_deref());
        assert_eq!(
            Some("Testing2"),
            prettify_dir("./futher/10-Testing2/").as_deref()
        );
    }

    #[test]
    fn path_or_default_with_valid_argument() {
        let source = Some("/Users/foo/".to_owned());
        assert_eq!(Path::new("/Users/foo/"), path_or_default(source, "."));
    }

    #[test]
    fn path_or_default_without_argument() {
        let source = None;
        assert_eq!(Path::new("baz/"), path_or_default(source, "baz/"));
    }
}