├── .gitattributes ├── .github ├── bors.toml └── workflows │ └── ci.yaml ├── .gitignore ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── Cargo.lock ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── build.rs ├── rust-toolchain.toml └── src ├── generate ├── block.rs ├── common.rs ├── device.rs ├── enumm.rs ├── fieldset.rs └── mod.rs ├── ir.rs ├── lib.rs ├── main.rs ├── svd2ir.rs ├── transform ├── add.rs ├── add_enum_variants.rs ├── add_fields.rs ├── add_interrupts.rs ├── add_registers.rs ├── common.rs ├── delete.rs ├── delete_enum_variants.rs ├── delete_enums.rs ├── delete_enums_used_in.rs ├── delete_enums_with_variants.rs ├── delete_fields.rs ├── delete_fieldsets.rs ├── delete_peripherals.rs ├── delete_registers.rs ├── delete_useless_enums.rs ├── expand_extends.rs ├── find_duplicate_enums.rs ├── find_duplicate_fieldsets.rs ├── fix_register_bit_sizes.rs ├── make_block.rs ├── make_field_array.rs ├── make_register_array.rs ├── merge_blocks.rs ├── merge_enums.rs ├── merge_fieldsets.rs ├── mod.rs ├── modify_byte_offset.rs ├── modify_fields_enum.rs ├── rename.rs ├── rename_enum_variants.rs ├── rename_fields.rs ├── rename_interrupts.rs ├── rename_peripherals.rs ├── rename_registers.rs ├── resize_enums.rs ├── sanitize.rs └── sort.rs ├── util.rs └── validate.rs /.gitattributes: -------------------------------------------------------------------------------- 1 | .vscode/*.json linguist-language=JSON-with-Comments 2 | -------------------------------------------------------------------------------- /.github/bors.toml: -------------------------------------------------------------------------------- 1 | status = [ 2 | "build", 3 | ] 4 | delete_merged_branches = true 5 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | 5 | env: 6 | CARGO_TERM_COLOR: always 7 | 8 | jobs: 9 | 
build: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v2 13 | 14 | - name: Cache Dependencies 15 | uses: Swatinem/rust-cache@v1.3.0 16 | 17 | - name: Check 18 | run: | 19 | cargo check 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | [._]*.sw[a-p] 2 | *.org 3 | *.rs.bk 4 | target -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "type": "lldb", 6 | "preLaunchTask": "cargo build", 7 | "request": "launch", 8 | "name": "Debug", 9 | "program": "${workspaceFolder}/target/debug/chiptool", 10 | // Assume stm32-data and chiptool are cloned side-by-side 11 | /* 12 | "cwd": "${workspaceFolder}/..", 13 | // Add chiptool commandline arguments here to act as a debug run. 
14 | "args": [ 15 | "transform", 16 | "--transform", 17 | "stm32-data/transforms/SAI.yaml", 18 | "--input", 19 | "stm32-data/data/registers/sai_h5.yaml", 20 | "--output", 21 | "stm32-data/data/registers/sai_h5.yaml" 22 | ] 23 | */ 24 | } 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "rust-analyzer.procMacro.enable": true, 3 | "editor.formatOnSave": true, 4 | "rust-analyzer.imports.granularity.enforce": true, 5 | "rust-analyzer.imports.granularity.group": "module", 6 | "rust-analyzer.cargo.buildScripts.enable": true, 7 | "rust-analyzer.procMacro.attributes.enable": false 8 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "type": "cargo", 6 | "command": "build", 7 | "problemMatcher": [ 8 | "$rustc" 9 | ], 10 | "group": "build", 11 | "label": "cargo build" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "aho-corasick" 7 | version = "1.1.2" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" 10 | dependencies = [ 11 | "memchr", 12 | ] 13 | 14 | [[package]] 15 | name = "anstream" 16 | version = "0.6.11" 17 | source = "registry+https://github.com/rust-lang/crates.io-index" 18 | checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" 19 | dependencies = [ 20 | "anstyle", 21 | "anstyle-parse", 22 | "anstyle-query", 23 | "anstyle-wincon", 24 | "colorchoice", 25 | "utf8parse", 26 | ] 27 | 28 | [[package]] 29 | name = "anstyle" 30 | version = "1.0.6" 31 | source = "registry+https://github.com/rust-lang/crates.io-index" 32 | checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" 33 | 34 | [[package]] 35 | name = "anstyle-parse" 36 | version = "0.2.3" 37 | source = "registry+https://github.com/rust-lang/crates.io-index" 38 | checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" 39 | dependencies = [ 40 | "utf8parse", 41 | ] 42 | 43 | [[package]] 44 | name = "anstyle-query" 45 | version = "1.0.2" 46 | source = "registry+https://github.com/rust-lang/crates.io-index" 47 | checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" 48 | dependencies = [ 49 | "windows-sys", 50 | ] 51 | 52 | [[package]] 53 | name = "anstyle-wincon" 54 | version = "3.0.2" 55 | source = "registry+https://github.com/rust-lang/crates.io-index" 56 | checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" 57 | dependencies = [ 58 | "anstyle", 59 | "windows-sys", 60 | ] 61 | 62 | [[package]] 63 | name = "anyhow" 64 | version = "1.0.79" 65 | source = "registry+https://github.com/rust-lang/crates.io-index" 66 | checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" 67 | 68 | [[package]] 69 | name = "chiptool" 70 | version = "0.1.0" 
71 | dependencies = [ 72 | "anyhow", 73 | "clap", 74 | "env_logger", 75 | "inflections", 76 | "log", 77 | "proc-macro2", 78 | "quote", 79 | "regex", 80 | "serde", 81 | "serde_yaml", 82 | "svd-parser", 83 | ] 84 | 85 | [[package]] 86 | name = "clap" 87 | version = "4.5.0" 88 | source = "registry+https://github.com/rust-lang/crates.io-index" 89 | checksum = "80c21025abd42669a92efc996ef13cfb2c5c627858421ea58d5c3b331a6c134f" 90 | dependencies = [ 91 | "clap_builder", 92 | "clap_derive", 93 | ] 94 | 95 | [[package]] 96 | name = "clap_builder" 97 | version = "4.5.0" 98 | source = "registry+https://github.com/rust-lang/crates.io-index" 99 | checksum = "458bf1f341769dfcf849846f65dffdf9146daa56bcd2a47cb4e1de9915567c99" 100 | dependencies = [ 101 | "anstream", 102 | "anstyle", 103 | "clap_lex", 104 | "strsim", 105 | ] 106 | 107 | [[package]] 108 | name = "clap_derive" 109 | version = "4.5.0" 110 | source = "registry+https://github.com/rust-lang/crates.io-index" 111 | checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" 112 | dependencies = [ 113 | "heck", 114 | "proc-macro2", 115 | "quote", 116 | "syn", 117 | ] 118 | 119 | [[package]] 120 | name = "clap_lex" 121 | version = "0.7.0" 122 | source = "registry+https://github.com/rust-lang/crates.io-index" 123 | checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" 124 | 125 | [[package]] 126 | name = "colorchoice" 127 | version = "1.0.0" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" 130 | 131 | [[package]] 132 | name = "env_filter" 133 | version = "0.1.0" 134 | source = "registry+https://github.com/rust-lang/crates.io-index" 135 | checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" 136 | dependencies = [ 137 | "log", 138 | "regex", 139 | ] 140 | 141 | [[package]] 142 | name = "env_logger" 143 | version = "0.11.1" 144 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 145 | checksum = "05e7cf40684ae96ade6232ed84582f40ce0a66efcd43a5117aef610534f8e0b8" 146 | dependencies = [ 147 | "anstream", 148 | "anstyle", 149 | "env_filter", 150 | "humantime", 151 | "log", 152 | ] 153 | 154 | [[package]] 155 | name = "equivalent" 156 | version = "1.0.1" 157 | source = "registry+https://github.com/rust-lang/crates.io-index" 158 | checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" 159 | 160 | [[package]] 161 | name = "hashbrown" 162 | version = "0.14.3" 163 | source = "registry+https://github.com/rust-lang/crates.io-index" 164 | checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" 165 | 166 | [[package]] 167 | name = "heck" 168 | version = "0.4.1" 169 | source = "registry+https://github.com/rust-lang/crates.io-index" 170 | checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" 171 | 172 | [[package]] 173 | name = "humantime" 174 | version = "2.1.0" 175 | source = "registry+https://github.com/rust-lang/crates.io-index" 176 | checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" 177 | 178 | [[package]] 179 | name = "indexmap" 180 | version = "2.2.3" 181 | source = "registry+https://github.com/rust-lang/crates.io-index" 182 | checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" 183 | dependencies = [ 184 | "equivalent", 185 | "hashbrown", 186 | ] 187 | 188 | [[package]] 189 | name = "inflections" 190 | version = "1.1.1" 191 | source = "registry+https://github.com/rust-lang/crates.io-index" 192 | checksum = "a257582fdcde896fd96463bf2d40eefea0580021c0712a0e2b028b60b47a837a" 193 | 194 | [[package]] 195 | name = "itoa" 196 | version = "1.0.10" 197 | source = "registry+https://github.com/rust-lang/crates.io-index" 198 | checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" 199 | 200 | [[package]] 201 | name = "log" 202 | version = "0.4.20" 203 | source 
= "registry+https://github.com/rust-lang/crates.io-index" 204 | checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" 205 | 206 | [[package]] 207 | name = "memchr" 208 | version = "2.7.1" 209 | source = "registry+https://github.com/rust-lang/crates.io-index" 210 | checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" 211 | 212 | [[package]] 213 | name = "once_cell" 214 | version = "1.19.0" 215 | source = "registry+https://github.com/rust-lang/crates.io-index" 216 | checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" 217 | 218 | [[package]] 219 | name = "proc-macro2" 220 | version = "1.0.78" 221 | source = "registry+https://github.com/rust-lang/crates.io-index" 222 | checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" 223 | dependencies = [ 224 | "unicode-ident", 225 | ] 226 | 227 | [[package]] 228 | name = "quote" 229 | version = "1.0.35" 230 | source = "registry+https://github.com/rust-lang/crates.io-index" 231 | checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" 232 | dependencies = [ 233 | "proc-macro2", 234 | ] 235 | 236 | [[package]] 237 | name = "regex" 238 | version = "1.11.1" 239 | source = "registry+https://github.com/rust-lang/crates.io-index" 240 | checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" 241 | dependencies = [ 242 | "aho-corasick", 243 | "memchr", 244 | "regex-automata", 245 | "regex-syntax", 246 | ] 247 | 248 | [[package]] 249 | name = "regex-automata" 250 | version = "0.4.9" 251 | source = "registry+https://github.com/rust-lang/crates.io-index" 252 | checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" 253 | dependencies = [ 254 | "aho-corasick", 255 | "memchr", 256 | "regex-syntax", 257 | ] 258 | 259 | [[package]] 260 | name = "regex-syntax" 261 | version = "0.8.5" 262 | source = "registry+https://github.com/rust-lang/crates.io-index" 263 | checksum = 
"2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" 264 | 265 | [[package]] 266 | name = "roxmltree" 267 | version = "0.19.0" 268 | source = "registry+https://github.com/rust-lang/crates.io-index" 269 | checksum = "3cd14fd5e3b777a7422cca79358c57a8f6e3a703d9ac187448d0daf220c2407f" 270 | 271 | [[package]] 272 | name = "ryu" 273 | version = "1.0.16" 274 | source = "registry+https://github.com/rust-lang/crates.io-index" 275 | checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" 276 | 277 | [[package]] 278 | name = "serde" 279 | version = "1.0.196" 280 | source = "registry+https://github.com/rust-lang/crates.io-index" 281 | checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" 282 | dependencies = [ 283 | "serde_derive", 284 | ] 285 | 286 | [[package]] 287 | name = "serde_derive" 288 | version = "1.0.196" 289 | source = "registry+https://github.com/rust-lang/crates.io-index" 290 | checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" 291 | dependencies = [ 292 | "proc-macro2", 293 | "quote", 294 | "syn", 295 | ] 296 | 297 | [[package]] 298 | name = "serde_yaml" 299 | version = "0.9.34-deprecated" 300 | source = "registry+https://github.com/rust-lang/crates.io-index" 301 | checksum = "d4f17ab28832fcb8e88a0e938aaa915b4f4618142bd011d4e6a3060028974c47" 302 | dependencies = [ 303 | "indexmap", 304 | "itoa", 305 | "ryu", 306 | "serde", 307 | "unsafe-libyaml", 308 | ] 309 | 310 | [[package]] 311 | name = "strsim" 312 | version = "0.11.0" 313 | source = "registry+https://github.com/rust-lang/crates.io-index" 314 | checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" 315 | 316 | [[package]] 317 | name = "svd-parser" 318 | version = "0.14.5" 319 | source = "git+https://github.com/Dirbaio/svd.git?rev=4d5c96f95b32acf9c9bfbda5a0619a2374475fe7#4d5c96f95b32acf9c9bfbda5a0619a2374475fe7" 320 | dependencies = [ 321 | "anyhow", 322 | "roxmltree", 323 | "svd-rs", 324 | 
"thiserror", 325 | ] 326 | 327 | [[package]] 328 | name = "svd-rs" 329 | version = "0.14.7" 330 | source = "git+https://github.com/Dirbaio/svd.git?rev=4d5c96f95b32acf9c9bfbda5a0619a2374475fe7#4d5c96f95b32acf9c9bfbda5a0619a2374475fe7" 331 | dependencies = [ 332 | "once_cell", 333 | "regex", 334 | "thiserror", 335 | ] 336 | 337 | [[package]] 338 | name = "syn" 339 | version = "2.0.48" 340 | source = "registry+https://github.com/rust-lang/crates.io-index" 341 | checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" 342 | dependencies = [ 343 | "proc-macro2", 344 | "quote", 345 | "unicode-ident", 346 | ] 347 | 348 | [[package]] 349 | name = "thiserror" 350 | version = "1.0.57" 351 | source = "registry+https://github.com/rust-lang/crates.io-index" 352 | checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" 353 | dependencies = [ 354 | "thiserror-impl", 355 | ] 356 | 357 | [[package]] 358 | name = "thiserror-impl" 359 | version = "1.0.57" 360 | source = "registry+https://github.com/rust-lang/crates.io-index" 361 | checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" 362 | dependencies = [ 363 | "proc-macro2", 364 | "quote", 365 | "syn", 366 | ] 367 | 368 | [[package]] 369 | name = "unicode-ident" 370 | version = "1.0.12" 371 | source = "registry+https://github.com/rust-lang/crates.io-index" 372 | checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" 373 | 374 | [[package]] 375 | name = "unsafe-libyaml" 376 | version = "0.2.11" 377 | source = "registry+https://github.com/rust-lang/crates.io-index" 378 | checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" 379 | 380 | [[package]] 381 | name = "utf8parse" 382 | version = "0.2.1" 383 | source = "registry+https://github.com/rust-lang/crates.io-index" 384 | checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" 385 | 386 | [[package]] 387 | name = "windows-sys" 388 | version = "0.52.0" 
389 | source = "registry+https://github.com/rust-lang/crates.io-index" 390 | checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" 391 | dependencies = [ 392 | "windows-targets", 393 | ] 394 | 395 | [[package]] 396 | name = "windows-targets" 397 | version = "0.52.0" 398 | source = "registry+https://github.com/rust-lang/crates.io-index" 399 | checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" 400 | dependencies = [ 401 | "windows_aarch64_gnullvm", 402 | "windows_aarch64_msvc", 403 | "windows_i686_gnu", 404 | "windows_i686_msvc", 405 | "windows_x86_64_gnu", 406 | "windows_x86_64_gnullvm", 407 | "windows_x86_64_msvc", 408 | ] 409 | 410 | [[package]] 411 | name = "windows_aarch64_gnullvm" 412 | version = "0.52.0" 413 | source = "registry+https://github.com/rust-lang/crates.io-index" 414 | checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" 415 | 416 | [[package]] 417 | name = "windows_aarch64_msvc" 418 | version = "0.52.0" 419 | source = "registry+https://github.com/rust-lang/crates.io-index" 420 | checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" 421 | 422 | [[package]] 423 | name = "windows_i686_gnu" 424 | version = "0.52.0" 425 | source = "registry+https://github.com/rust-lang/crates.io-index" 426 | checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" 427 | 428 | [[package]] 429 | name = "windows_i686_msvc" 430 | version = "0.52.0" 431 | source = "registry+https://github.com/rust-lang/crates.io-index" 432 | checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" 433 | 434 | [[package]] 435 | name = "windows_x86_64_gnu" 436 | version = "0.52.0" 437 | source = "registry+https://github.com/rust-lang/crates.io-index" 438 | checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" 439 | 440 | [[package]] 441 | name = "windows_x86_64_gnullvm" 442 | version = "0.52.0" 443 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 444 | checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" 445 | 446 | [[package]] 447 | name = "windows_x86_64_msvc" 448 | version = "0.52.0" 449 | source = "registry+https://github.com/rust-lang/crates.io-index" 450 | checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" 451 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "chiptool" 3 | license = "MIT OR Apache-2.0" 4 | version = "0.1.0" 5 | edition = "2021" 6 | 7 | [dependencies] 8 | clap = { version = "4.5.0", features = ["derive"] } 9 | env_logger = "0.11.1" 10 | inflections = "1.1" 11 | log = { version = "~0.4", features = ["std"] } 12 | quote = "1.0" 13 | proc-macro2 = "1.0" 14 | anyhow = "1.0.79" 15 | regex = "1.10.3" 16 | serde = { version = "1.0.196", features = [ "derive" ]} 17 | svd-parser = { git = "https://github.com/Dirbaio/svd.git", rev = "4d5c96f95b32acf9c9bfbda5a0619a2374475fe7", features = ["derive-from", "expand"] } 18 | #svd-parser = { path = "./svd/svd-parser", features = ["derive-from", "expand"] } 19 | # Development has stopped for `serde_yaml` 20 | serde_yaml = "=0.9.34-deprecated" 21 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016 Jorge Aparicio 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # chiptool 2 | 3 | `chiptool` is an experimental fork of `svd2rust` to experiment with: 4 | 5 | - Different API for the generated code. 6 | - Integrating "transforms" in the generation process 7 | - New workflow for storing register definitions in standalone YAML files. 8 | 9 | ## PACs using chiptool 10 | 11 | It is mainly used by [Embassy project][embassy] to generate PACs targeting 12 | Raspberry Pi RP2040 and RP2350, Atmel STM32-series and Nordic Semiconductor 13 | nRF5x-series microcontrollers. Other SVDs might not work quite right yet. 14 | 15 | - https://github.com/embassy-rs/rp-pac 16 | - https://github.com/embassy-rs/nrf-pac 17 | - https://github.com/embassy-rs/stm32-data 18 | 19 | ## Installation 20 | 21 | ```bash 22 | cargo install --git https://github.com/embassy-rs/chiptool --locked 23 | ``` 24 | 25 | ## Changes from svd2rust main 26 | 27 | ### No owned structs 28 | 29 | Original svd2rust generates an owned struct for each peripheral. This has turned out to have some severe downsides: 30 | 31 | 1. there are many cases where the HAL wants to "split up" a peripheral into multiple owned parts. Examples: 32 | - Many pins in a GPIO port peripheral. 33 | - The RX and TX halfs of a UART peripheral. 34 | - Different clocks/PLLs in a clock control peripheral. 35 | - Channels/streams in a DMA controller 36 | - PWM channels 37 | 38 | Virtually all existing HALs run into this issue, and have to unsafely bypass the ownership rules. 
[nrf gpio](https://github.com/nrf-rs/nrf-hal/blob/6fc5061509d5f3efaa2db15d4af7e3bced4a2e83/nrf-hal-common/src/gpio.rs#L135), [nrf i2c](https://github.com/nrf-rs/nrf-hal/blob/1d6e228f11b7df3847d33d66b01ff772501beb3c/nrf-hal-common/src/twi.rs#L28), [nrf ppi](https://github.com/nrf-rs/nrf-hal/blob/8a28455ab93eb47be4e4edb62ebe96939e1a7ebd/nrf-hal-common/src/ppi/mod.rs#L122), [stm32f4 gpio](https://github.com/stm32-rs/stm32f4xx-hal/blob/9b6aad4b3365a48ae652c315730ab47522e57cfb/src/gpio.rs#L302), [stm32f4 dma](https://github.com/stm32-rs/stm32f4xx-hal/blob/9b6aad4b3365a48ae652c315730ab47522e57cfb/src/dma/mod.rs#L359), [stm32f4 pwm](https://github.com/stm32-rs/stm32f4xx-hal/blob/bb214b6017d84a9c8dd2e8c9fd1f915141e167cc/src/pwm.rs#L228), [atsamd gpio](https://github.com/atsamd-rs/atsamd/blob/4816bb13a12a604e51f929d17b286071a0082c82/hal/src/common/gpio/v2/pin.rs#L669) ... 39 | 40 | Since HALs in practice always bypass the PAC ownership rules and create their own safe abstractions, there's not much advantage in having ownership rules in the PAC in the first place. Not having them makes HAL code cleaner. 41 | 42 | 2. sometimes "ownership" is not so clear-cut: 43 | - Multicore. Some peripherals are "core-local", they have an instance per core. Constant address, which instance you access depends on which core you're running on. For example Cortex-M core peripherals, and SIO in RP2040. 44 | - Mutually-exclusive peripherals. In nRF you can only use one of (UART0, SPIM0, SPIS0, TWIM0, TWIS0) at the same time, one of (UART1, SPIM1, SPIS1, TWIM1, TWIS1) at the same time... They're the same peripheral in different "modes". Current nRF PACs get this wrong, allowing you to use e.g. SPIM0 and TWIM0 at the same time, which breaks. 45 | 3. Ownership in PACs means upgrading the PAC is ALWAYS a breaking change. 
46 | 47 | To guarantee you can't get two singletons for the same peripheral, PACs deliberately sabotage building a binary containing two PAC major versions (with this [no\_mangle thing](https://github.com/nrf-rs/nrf-pacs/blob/8f9da05ca1b496bd743f223ed1122dfe9220956c/pacs/nrf52840-pac/src/lib.rs#L2279-L2280)). 48 | 49 | This means the HAL major-bumping the PAC dep version is a breaking change, so the HAL would have to be major-bumped as well. And all PAC bumps are breaking, and they're VERY common... 50 | 51 | ### Structs representing register values (sets of fields) 52 | 53 | Current svd2rust provides "read proxy" and "write proxy" structs with methods to access register fields when reading/writing. However: 54 | 55 | - There's no type-safe way to save the _value_ of a register in a variable to write later. (there's `.bits()`, but it's not typesafe) 56 | - There's no way to read/modify register fields on a saved value (if using `.bits()`, the user has a raw u32, they need to extract the fields manually with bitwise manipulation) 57 | 58 | Solution: for each register with fields, a "fieldset" struct is generated. This struct wraps the raw `u32` and allows getting/setting individual fields. 59 | 60 | ```rust 61 | let val = pac::watchdog::fields::Tick(0); 62 | val.set_cycles(XOSC_MHZ as u16); 63 | val.set_enable(true); 64 | info!("enabled: {:bool}", val.enable()); 65 | ``` 66 | 67 | On a register, `.read()` and `.write_value()` can get and set such fieldset values: 68 | 69 | ```rust 70 | let val = pac::WATCHDOG.tick().read(); 71 | val.set_enable(false); 72 | // We could save val in a variable somewhere else 73 | // then get it and write it back later 74 | pac::WATCHDOG.tick().write_value(val); 75 | ``` 76 | 77 | Closure-based `.write()` and `.modify()` are provided too, like the current svd2rust. 
78 | 79 | ```rust 80 | pac::WATCHDOG.tick().write(|w| { 81 | w.set_cycles(XOSC_MHZ as u16); 82 | w.set_enable(true); 83 | }); 84 | ``` 85 | 86 | ### Structs representing enumerated values 87 | 88 | For each EnumeratedValues in a field, a struct is generated. 89 | 90 | This struct is _not_ a Rust enum, it is a struct with associated constants. 91 | 92 | ### Possibility to share items (blocks, fieldsets, enums) 93 | 94 | Many peripherals have multiple registers with the same fields (same names, same bit offsets). This tool allows the user to merge them via YAML config. Same for enums and register blocks. 95 | 96 | Fieldsets and enums can be shared across different registers, different register blocks, even different peripherals. 97 | 98 | Example: the RP2040 chip has two GPIO banks: `BANK0` and `QSPI`. These share many enums and field sets. Example of merging some: 99 | 100 | ```yaml 101 | - MergeEnums: 102 | from: io_[^:]+::values::Gpio.+Ctrl(.+)over 103 | to: io::values::${1}over 104 | ``` 105 | 106 | This merges all `INOVER`, `OUTOVER`, `OEOVER` and `IRQOVER` enums (144 enums!) into just 4. 107 | 108 | - huge reduction in generated code, mitigating long compile times which is one of the top complaints of current PACs. 109 | - Better code sharing in HALs since they can use a single enum/fieldset to read/write to multiple registers. 
110 | 111 | ### Automatic cluster creation 112 | 113 | ```yaml 114 | - MakeBlock: 115 | block: pio0::Pio0 116 | from: sm(\d+)_(.+) 117 | to_outer: sm$1 118 | to_inner: $2 119 | to_block: pio0::StateMachine 120 | ``` 121 | 122 | This collapses all `smX_*` registers into a single cluster: 123 | 124 | // before: 125 | RegisterBlock: 126 | sm0_clkdiv 127 | sm0_execctrl 128 | sm0_shiftctrl 129 | sm0_addr 130 | sm0_instr 131 | sm0_pinctrl 132 | sm1_clkdiv 133 | sm1_execctrl 134 | sm1_shiftctrl 135 | sm1_addr 136 | sm1_instr 137 | sm1_pinctrl 138 | sm2_clkdiv 139 | sm2_execctrl 140 | sm2_shiftctrl 141 | sm2_addr 142 | sm2_instr 143 | sm2_pinctrl 144 | sm3_clkdiv 145 | sm3_execctrl 146 | sm3_shiftctrl 147 | sm3_addr 148 | sm3_instr 149 | sm3_pinctrl 150 | 151 | // after: 152 | RegisterBlock: 153 | sm0 154 | sm1 155 | sm2 156 | sm3 157 | 158 | StateMachine block: 159 | clkdiv 160 | execctrl 161 | shiftctrl 162 | addr 163 | instr 164 | pinctrl 165 | 166 | ### Automatic array creation 167 | 168 | example: 169 | 170 | ```yaml 171 | - MakeRegisterArray: 172 | block: pio0::Pio0 173 | from: sm\d+ 174 | to: sm 175 | ``` 176 | 177 | // before: 178 | RegisterBlock: 179 | sm0 180 | sm1 181 | sm2 182 | sm3 183 | 184 | // after: 185 | RegisterBlock: 186 | sm (array of length 4) 187 | 188 | ### RegisterBlocks and Registers wrap pointers 189 | 190 | ```rust 191 | // a RegisterBlock 192 | pub struct Resets { 193 | ptr: *mut u8 194 | } 195 | 196 | impl Resets { 197 | // A register access function. This is just pointer arithmetic 198 | pub fn reset_done(self) -> Reg { 199 | Reg::new(self.0.add(8usize)) 200 | } 201 | } 202 | 203 | // the Reg struct 204 | pub struct Reg { 205 | ptr: *mut u8, 206 | ... 207 | } 208 | ``` 209 | 210 | - No need to calculate and fill padding holes in RegisterBlock structs 211 | - No problem if registers overlap (currently svd2rust has to check for this, and falls back to a function-based codegen similar to this one) 212 | - Pointer provenance is not erased. 
Previous codegen causes pointers to become references (&), so it's undefined behavior to do arithmetic with a register pointer to write somewhere else. This is useful in a few niche situations: 213 | - calculating a pointer to a particular register bit in the bitbanding region 214 | - The RP2040 chip has register aliases that atomically set/clear/xor register bits at addr + 0x1000/0x2000/0x3000 215 | 216 | This generates the same assembly code as original svd2rust when optimizations are enabled. 217 | 218 | ## Running 219 | 220 | mkdir -p out 221 | mkdir -p out/src 222 | cargo run -- -i svd/rp2040.svd -c svd/rp2040.yaml 223 | rustfmt out/src/lib.rs 224 | (cd out; cargo build && cargo doc) 225 | 226 | ## To-Do 227 | 228 | Nice to have features: 229 | 230 | - More transforms (deletes, renames, move entire module...) 231 | - clean up doc comments better 232 | 233 | ## License 234 | 235 | Licensed under either of 236 | 237 | - Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or 238 | http://www.apache.org/licenses/LICENSE-2.0) 239 | - MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 240 | 241 | at your option. 242 | 243 | ### Contribution 244 | 245 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the 246 | work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 247 | additional terms or conditions. 248 | 249 | ## Code of Conduct 250 | 251 | Contribution to this crate is organized under the terms of the [Rust Code of 252 | Conduct][coc], the maintainer of this crate, the [Tools team][team], promises 253 | to intervene to uphold that code of conduct. 
254 | 255 | [coc]: CODE_OF_CONDUCT.md 256 | [embassy]: https://embassy.dev/ 257 | [team]: https://github.com/rust-embedded/wg#the-tools-team 258 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::error::Error; 3 | use std::fs::File; 4 | use std::io::Write; 5 | use std::path::PathBuf; 6 | use std::process::Command; 7 | 8 | struct IgnoredError {} 9 | 10 | impl From for IgnoredError 11 | where 12 | E: Error, 13 | { 14 | fn from(_: E) -> IgnoredError { 15 | IgnoredError {} 16 | } 17 | } 18 | 19 | fn main() { 20 | println!("cargo:rerun-if-changed=build.rs"); 21 | 22 | let out_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); 23 | 24 | File::create(out_dir.join("commit-info.txt")) 25 | .unwrap() 26 | .write_all(commit_info().as_bytes()) 27 | .unwrap(); 28 | } 29 | 30 | fn commit_info() -> String { 31 | match (commit_hash(), commit_date()) { 32 | (Ok(hash), Ok(date)) => format!(" ({} {})", hash.trim(), date.trim()), 33 | _ => String::new(), 34 | } 35 | } 36 | 37 | fn commit_hash() -> Result { 38 | Ok(String::from_utf8( 39 | Command::new("git") 40 | .args(["rev-parse", "--short", "HEAD"]) 41 | .output()? 42 | .stdout, 43 | )?) 44 | } 45 | 46 | fn commit_date() -> Result { 47 | Ok(String::from_utf8( 48 | Command::new("git") 49 | .args(["log", "-1", "--date=short", "--pretty=format:%cd"]) 50 | .output()? 51 | .stdout, 52 | )?) 
53 | } 54 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | # Before upgrading check that everything is available on all tier1 targets here: 2 | # https://rust-lang.github.io/rustup-components-history 3 | [toolchain] 4 | channel = "1.82" 5 | components = [ "rust-src", "rustfmt" ] 6 | -------------------------------------------------------------------------------- /src/generate/block.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use proc_macro2::TokenStream; 3 | use proc_macro2::{Ident, Span}; 4 | use quote::quote; 5 | 6 | use crate::ir::*; 7 | use crate::util; 8 | 9 | use super::sorted; 10 | 11 | pub fn render(opts: &super::Options, ir: &IR, b: &Block, path: &str) -> Result { 12 | let common_path = opts.common_path(); 13 | 14 | let span = Span::call_site(); 15 | let mut items = TokenStream::new(); 16 | 17 | for i in sorted(&b.items, |i| (i.byte_offset, i.name.clone())) { 18 | let name = Ident::new(&i.name, span); 19 | let offset = util::hex_usize(i.byte_offset as u64); 20 | 21 | let doc = util::doc(&i.description); 22 | 23 | match &i.inner { 24 | BlockItemInner::Register(r) => { 25 | let reg_ty = if let Some(fieldset_path) = &r.fieldset { 26 | let _f = ir.fieldsets.get(fieldset_path).unwrap(); 27 | util::relative_path(fieldset_path, path) 28 | } else { 29 | match r.bit_size { 30 | 8 => quote!(u8), 31 | 16 => quote!(u16), 32 | 32 => quote!(u32), 33 | 64 => quote!(u64), 34 | _ => panic!("Invalid register bit size {}", r.bit_size), 35 | } 36 | }; 37 | 38 | let access = match r.access { 39 | Access::Read => quote!(#common_path::R), 40 | Access::Write => quote!(#common_path::W), 41 | Access::ReadWrite => quote!(#common_path::RW), 42 | }; 43 | 44 | let ty = quote!(#common_path::Reg<#reg_ty, #access>); 45 | if let Some(array) = &i.array { 46 | let (len, offs_expr) = 
super::process_array(array); 47 | items.extend(quote!( 48 | #doc 49 | #[inline(always)] 50 | pub const fn #name(self, n: usize) -> #ty { 51 | assert!(n < #len); 52 | unsafe { #common_path::Reg::from_ptr(self.ptr.add(#offset + #offs_expr) as _) } 53 | } 54 | )); 55 | } else { 56 | items.extend(quote!( 57 | #doc 58 | #[inline(always)] 59 | pub const fn #name(self) -> #ty { 60 | unsafe { #common_path::Reg::from_ptr(self.ptr.add(#offset) as _) } 61 | } 62 | )); 63 | } 64 | } 65 | BlockItemInner::Block(b) => { 66 | let block_path = &b.block; 67 | let _b2 = ir.blocks.get(block_path).unwrap(); 68 | let ty = util::relative_path(block_path, path); 69 | if let Some(array) = &i.array { 70 | let (len, offs_expr) = super::process_array(array); 71 | 72 | items.extend(quote!( 73 | #doc 74 | #[inline(always)] 75 | pub const fn #name(self, n: usize) -> #ty { 76 | assert!(n < #len); 77 | unsafe { #ty::from_ptr(self.ptr.add(#offset + #offs_expr) as _) } 78 | } 79 | )); 80 | } else { 81 | items.extend(quote!( 82 | #doc 83 | #[inline(always)] 84 | pub const fn #name(self) -> #ty { 85 | unsafe { #ty::from_ptr(self.ptr.add(#offset) as _) } 86 | } 87 | )); 88 | } 89 | } 90 | } 91 | } 92 | 93 | let (_, name) = super::split_path(path); 94 | let name = Ident::new(name, span); 95 | let doc = util::doc(&b.description); 96 | let out = quote! 
{ 97 | #doc 98 | #[derive(Copy, Clone, Eq, PartialEq)] 99 | pub struct #name { 100 | ptr: *mut u8 101 | } 102 | unsafe impl Send for #name {} 103 | unsafe impl Sync for #name {} 104 | impl #name { 105 | #[inline(always)] 106 | pub const unsafe fn from_ptr(ptr: *mut ()) -> Self { 107 | Self { 108 | ptr: ptr as _, 109 | } 110 | } 111 | 112 | #[inline(always)] 113 | pub const fn as_ptr(&self) -> *mut () { 114 | self.ptr as _ 115 | } 116 | 117 | #items 118 | } 119 | }; 120 | 121 | Ok(out) 122 | } 123 | -------------------------------------------------------------------------------- /src/generate/common.rs: -------------------------------------------------------------------------------- 1 | use core::marker::PhantomData; 2 | 3 | #[derive(Copy, Clone, PartialEq, Eq)] 4 | pub struct RW; 5 | #[derive(Copy, Clone, PartialEq, Eq)] 6 | pub struct R; 7 | #[derive(Copy, Clone, PartialEq, Eq)] 8 | pub struct W; 9 | 10 | mod sealed { 11 | use super::*; 12 | pub trait Access {} 13 | impl Access for R {} 14 | impl Access for W {} 15 | impl Access for RW {} 16 | } 17 | 18 | pub trait Access: sealed::Access + Copy {} 19 | impl Access for R {} 20 | impl Access for W {} 21 | impl Access for RW {} 22 | 23 | pub trait Read: Access {} 24 | impl Read for RW {} 25 | impl Read for R {} 26 | 27 | pub trait Write: Access {} 28 | impl Write for RW {} 29 | impl Write for W {} 30 | 31 | #[derive(Copy, Clone, PartialEq, Eq)] 32 | pub struct Reg { 33 | ptr: *mut u8, 34 | phantom: PhantomData<*mut (T, A)>, 35 | } 36 | unsafe impl Send for Reg {} 37 | unsafe impl Sync for Reg {} 38 | 39 | impl Reg { 40 | #[allow(clippy::missing_safety_doc)] 41 | #[inline(always)] 42 | pub const unsafe fn from_ptr(ptr: *mut T) -> Self { 43 | Self { 44 | ptr: ptr as _, 45 | phantom: PhantomData, 46 | } 47 | } 48 | 49 | #[inline(always)] 50 | pub const fn as_ptr(&self) -> *mut T { 51 | self.ptr as _ 52 | } 53 | } 54 | 55 | impl Reg { 56 | #[inline(always)] 57 | pub fn read(&self) -> T { 58 | unsafe { (self.ptr as *mut 
T).read_volatile() } 59 | } 60 | } 61 | 62 | impl Reg { 63 | #[inline(always)] 64 | pub fn write_value(&self, val: T) { 65 | unsafe { (self.ptr as *mut T).write_volatile(val) } 66 | } 67 | } 68 | 69 | impl Reg { 70 | #[inline(always)] 71 | pub fn write(&self, f: impl FnOnce(&mut T)) { 72 | let mut val = Default::default(); 73 | f(&mut val); 74 | self.write_value(val); 75 | } 76 | } 77 | 78 | impl Reg { 79 | #[inline(always)] 80 | pub fn modify(&self, f: impl FnOnce(&mut T)) { 81 | let mut val = self.read(); 82 | f(&mut val); 83 | self.write_value(val); 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /src/generate/device.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Write as _; 2 | 3 | use anyhow::Result; 4 | use proc_macro2::{Ident, Span, TokenStream}; 5 | use quote::quote; 6 | 7 | use crate::ir::*; 8 | use crate::util::{self, StringExt}; 9 | 10 | use super::sorted; 11 | 12 | pub fn render_device_x(_ir: &IR, d: &Device) -> Result { 13 | let mut device_x = String::new(); 14 | for i in sorted(&d.interrupts, |i| i.value) { 15 | writeln!(&mut device_x, "PROVIDE({} = DefaultHandler);", i.name).unwrap(); 16 | } 17 | Ok(device_x) 18 | } 19 | 20 | pub fn render(opts: &super::Options, ir: &IR, d: &Device, path: &str) -> Result { 21 | let mut out = TokenStream::new(); 22 | let span = Span::call_site(); 23 | 24 | let mut interrupts = TokenStream::new(); 25 | let mut peripherals = TokenStream::new(); 26 | let mut vectors = TokenStream::new(); 27 | let mut names = vec![]; 28 | 29 | let mut pos = 0; 30 | for i in sorted(&d.interrupts, |i| i.value) { 31 | while pos < i.value { 32 | vectors.extend(quote!(Vector { _reserved: 0 },)); 33 | pos += 1; 34 | } 35 | pos += 1; 36 | 37 | let name_uc = Ident::new(&i.name.to_sanitized_upper_case(), span); 38 | let description = format!( 39 | "{} - {}", 40 | i.value, 41 | i.description 42 | .as_ref() 43 | .map(|s| util::respace(s)) 44 | 
.as_ref() 45 | .map(|s| util::escape_brackets(s)) 46 | .unwrap_or_else(|| i.name.clone()) 47 | ); 48 | 49 | let value = util::unsuffixed(i.value as u64); 50 | 51 | interrupts.extend(quote! { 52 | #[doc = #description] 53 | #name_uc = #value, 54 | }); 55 | vectors.extend(quote!(Vector { _handler: #name_uc },)); 56 | names.push(name_uc); 57 | } 58 | 59 | for p in sorted(&d.peripherals, |p| p.base_address) { 60 | let name = Ident::new(&p.name, span); 61 | let address = util::hex_usize(p.base_address); 62 | let doc = util::doc(&p.description); 63 | 64 | if let Some(block_name) = &p.block { 65 | let _b = ir.blocks.get(block_name); 66 | let path = util::relative_path(block_name, path); 67 | 68 | peripherals.extend(quote! { 69 | #doc 70 | pub const #name: #path = unsafe { #path::from_ptr(#address as _) }; 71 | }); 72 | } else { 73 | peripherals.extend(quote! { 74 | #doc 75 | pub const #name: *mut () = #address as _; 76 | }); 77 | } 78 | } 79 | let n = util::unsuffixed(pos as u64); 80 | 81 | let defmt = opts.defmt_feature.as_ref().map(|defmt_feature| { 82 | quote! 
{ 83 | #[cfg_attr(feature = #defmt_feature, derive(defmt::Format))] 84 | } 85 | }); 86 | 87 | out.extend(quote!( 88 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 89 | #defmt 90 | pub enum Interrupt { 91 | #interrupts 92 | } 93 | 94 | unsafe impl cortex_m::interrupt::InterruptNumber for Interrupt { 95 | #[inline(always)] 96 | fn number(self) -> u16 { 97 | self as u16 98 | } 99 | } 100 | 101 | #[cfg(feature = "rt")] 102 | mod _vectors { 103 | unsafe extern "C" { 104 | #(fn #names();)* 105 | } 106 | 107 | pub union Vector { 108 | _handler: unsafe extern "C" fn(), 109 | _reserved: u32, 110 | } 111 | 112 | #[unsafe(link_section = ".vector_table.interrupts")] 113 | #[unsafe(no_mangle)] 114 | pub static __INTERRUPTS: [Vector; #n] = [ 115 | #vectors 116 | ]; 117 | } 118 | 119 | #peripherals 120 | )); 121 | 122 | if let Some(nvic_priority_bits) = d.nvic_priority_bits { 123 | let bits = util::unsuffixed(u64::from(nvic_priority_bits)); 124 | out.extend(quote! { 125 | /// Number available in the NVIC for configuring priority 126 | #[cfg(feature = "rt")] 127 | pub const NVIC_PRIO_BITS: u8 = #bits; 128 | }); 129 | } 130 | 131 | out.extend(quote! { 132 | #[cfg(feature = "rt")] 133 | pub use cortex_m_rt::interrupt; 134 | #[cfg(feature = "rt")] 135 | pub use Interrupt as interrupt; 136 | }); 137 | 138 | Ok(out) 139 | } 140 | -------------------------------------------------------------------------------- /src/generate/enumm.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use anyhow::Result; 4 | use proc_macro2::TokenStream; 5 | use proc_macro2::{Ident, Span}; 6 | use quote::quote; 7 | 8 | use crate::ir::*; 9 | use crate::util; 10 | 11 | use super::sorted; 12 | 13 | pub fn render(opts: &super::Options, _ir: &IR, e: &Enum, path: &str) -> Result { 14 | let span = Span::call_site(); 15 | 16 | // For very "sparse" enums, generate a newtype wrapping the uX. 
17 | // In particular, we generate a newtype if: 18 | // - there'd be 100 or more "reserved" cases, AND 19 | // - there'd be 50% or more "reserved" cases. 20 | let variant_count = e.variants.len() as u64; 21 | let reserved_count = (1u64 << e.bit_size) - variant_count; 22 | let newtype = reserved_count >= 100 && reserved_count >= variant_count; 23 | 24 | let ty = match e.bit_size { 25 | 1..=8 => quote!(u8), 26 | 9..=16 => quote!(u16), 27 | 17..=32 => quote!(u32), 28 | 33..=64 => quote!(u64), 29 | _ => panic!("Invalid bit_size {}", e.bit_size), 30 | }; 31 | 32 | let (_, name) = super::split_path(path); 33 | let name = Ident::new(name, span); 34 | let doc = util::doc(&e.description); 35 | let mask = util::hex(1u64.wrapping_shl(e.bit_size).wrapping_sub(1)); 36 | 37 | let mut out = TokenStream::new(); 38 | 39 | if newtype { 40 | let mut items = TokenStream::new(); 41 | let mut item_names_str = Vec::with_capacity(e.variants.len()); 42 | let mut item_values = Vec::with_capacity(e.variants.len()); 43 | 44 | for f in sorted(&e.variants, |f| (f.value, f.name.clone())) { 45 | let name = Ident::new(&f.name, span); 46 | let value = util::hex(f.value); 47 | 48 | item_names_str.push(&f.name); 49 | item_values.push(value.clone()); 50 | 51 | let doc = util::doc(&f.description); 52 | items.extend(quote!( 53 | #doc 54 | pub const #name: Self = Self(#value); 55 | )); 56 | } 57 | 58 | let defmt = opts.defmt_feature.as_ref().map(|defmt_feature| { 59 | quote! { 60 | #[cfg(feature = #defmt_feature)] 61 | impl defmt::Format for #name { 62 | fn format(&self, f: defmt::Formatter) { 63 | match self.0 { 64 | #( 65 | #item_values => defmt::write!(f, #item_names_str), 66 | )* 67 | other => defmt::write!(f, "0x{:02X}", other), 68 | } 69 | } 70 | } 71 | } 72 | }); 73 | 74 | out.extend(quote! 
{ 75 | #doc 76 | #[repr(transparent)] 77 | #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] 78 | pub struct #name (#ty); 79 | 80 | impl #name { 81 | #items 82 | } 83 | 84 | impl #name { 85 | pub const fn from_bits(val: #ty) -> #name { 86 | Self(val & #mask) 87 | } 88 | 89 | pub const fn to_bits(self) -> #ty { 90 | self.0 91 | } 92 | } 93 | 94 | impl core::fmt::Debug for #name { 95 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 96 | match self.0 { 97 | #( 98 | #item_values => f.write_str(#item_names_str), 99 | )* 100 | other => core::write!(f, "0x{:02X}", other), 101 | } 102 | } 103 | } 104 | 105 | #defmt 106 | }); 107 | } else { 108 | let variants: BTreeMap<_, _> = e.variants.iter().map(|v| (v.value, v)).collect(); 109 | let mut items = TokenStream::new(); 110 | for val in 0..(1 << e.bit_size) { 111 | if let Some(f) = variants.get(&val) { 112 | let name = Ident::new(&f.name, span); 113 | let value = util::hex(f.value); 114 | let doc = util::doc(&f.description); 115 | items.extend(quote!( 116 | #doc 117 | #name = #value, 118 | )); 119 | } else { 120 | let name = Ident::new(&format!("_RESERVED_{:x}", val), span); 121 | let value = util::hex(val); 122 | items.extend(quote!( 123 | #name = #value, 124 | )); 125 | } 126 | } 127 | 128 | let defmt = opts.defmt_feature.as_ref().map(|defmt_feature| { 129 | quote! { 130 | #[cfg_attr(feature = #defmt_feature, derive(defmt::Format))] 131 | } 132 | }); 133 | 134 | out.extend(quote! { 135 | #doc 136 | #[repr(#ty)] 137 | #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)] 138 | #defmt 139 | pub enum #name { 140 | #items 141 | } 142 | 143 | impl #name { 144 | #[inline(always)] 145 | pub const fn from_bits(val: #ty) -> #name { 146 | unsafe { core::mem::transmute(val & #mask) } 147 | } 148 | 149 | #[inline(always)] 150 | pub const fn to_bits(self) -> #ty { 151 | unsafe { core::mem::transmute(self) } 152 | } 153 | } 154 | }); 155 | } 156 | 157 | out.extend(quote! 
{ 158 | impl From<#ty> for #name { 159 | #[inline(always)] 160 | fn from(val: #ty) -> #name { 161 | #name::from_bits(val) 162 | } 163 | } 164 | 165 | impl From<#name> for #ty { 166 | #[inline(always)] 167 | fn from(val: #name) -> #ty { 168 | #name::to_bits(val) 169 | } 170 | } 171 | }); 172 | 173 | Ok(out) 174 | } 175 | -------------------------------------------------------------------------------- /src/generate/fieldset.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use proc_macro2::TokenStream; 3 | use proc_macro2::{Ident, Span}; 4 | use quote::quote; 5 | 6 | use crate::ir::*; 7 | use crate::util; 8 | 9 | use super::sorted; 10 | 11 | pub fn render(opts: &super::Options, ir: &IR, fs: &FieldSet, path: &str) -> Result { 12 | let span = Span::call_site(); 13 | let mut items = TokenStream::new(); 14 | let mut field_names = Vec::with_capacity(fs.fields.len()); 15 | let mut field_getters = Vec::with_capacity(fs.fields.len()); 16 | let mut field_types = Vec::with_capacity(fs.fields.len()); 17 | 18 | let ty = match fs.bit_size { 19 | 1..=8 => quote!(u8), 20 | 9..=16 => quote!(u16), 21 | 17..=32 => quote!(u32), 22 | 33..=64 => quote!(u64), 23 | _ => panic!("Invalid bit_size {}", fs.bit_size), 24 | }; 25 | 26 | for f in sorted(&fs.fields, |f| (f.bit_offset.clone(), f.name.clone())) { 27 | let name = Ident::new(&f.name, span); 28 | let name_set = Ident::new(&format!("set_{}", f.name), span); 29 | let off_in_reg = f.bit_offset.clone(); 30 | let _bit_size = f.bit_size as usize; 31 | let mask = util::hex(1u64.wrapping_shl(f.bit_size).wrapping_sub(1)); 32 | let doc = util::doc(&f.description); 33 | let field_ty: TokenStream; 34 | let to_bits: TokenStream; 35 | let from_bits: TokenStream; 36 | 37 | if let Some(e_path) = &f.enumm { 38 | let Some(e) = ir.enums.get(e_path) else { 39 | panic!("missing enum {}", e_path); 40 | }; 41 | 42 | let enum_ty = match e.bit_size { 43 | 1..=8 => quote!(u8), 44 | 9..=16 => 
quote!(u16), 45 | 17..=32 => quote!(u32), 46 | 33..=64 => quote!(u64), 47 | _ => panic!("Invalid bit_size {}", e.bit_size), 48 | }; 49 | 50 | field_ty = util::relative_path(e_path, path); 51 | to_bits = quote!(val.to_bits() as #ty); 52 | from_bits = quote!(#field_ty::from_bits(val as #enum_ty)); 53 | } else { 54 | field_ty = match f.bit_size { 55 | 1 => quote!(bool), 56 | 2..=8 => quote!(u8), 57 | 9..=16 => quote!(u16), 58 | 17..=32 => quote!(u32), 59 | 33..=64 => quote!(u64), 60 | _ => panic!("Invalid bit_size {}", f.bit_size), 61 | }; 62 | to_bits = quote!(val as #ty); 63 | from_bits = if f.bit_size == 1 { 64 | quote!(val != 0) 65 | } else { 66 | quote!(val as #field_ty) 67 | } 68 | } 69 | 70 | if let Some(array) = &f.array { 71 | // Print array fields using array indexing: "field[0]" 72 | for i in 0..array.len() { 73 | let debug_name = format!("{}[{i}]", f.name); 74 | field_names.push(debug_name); 75 | field_types.push(field_ty.clone()); 76 | field_getters.push(quote!(self.#name(#i))); 77 | } 78 | } else { 79 | field_names.push(f.name.clone()); 80 | field_types.push(field_ty.clone()); 81 | field_getters.push(quote!(self.#name())); 82 | } 83 | 84 | match off_in_reg { 85 | BitOffset::Regular(off_in_reg) => { 86 | let off_in_reg = off_in_reg as usize; 87 | if let Some(array) = &f.array { 88 | let (len, offs_expr) = super::process_array(array); 89 | items.extend(quote!( 90 | #doc 91 | #[must_use] 92 | #[inline(always)] 93 | pub const fn #name(&self, n: usize) -> #field_ty{ 94 | assert!(n < #len); 95 | let offs = #off_in_reg + #offs_expr; 96 | let val = (self.0 >> offs) & #mask; 97 | #from_bits 98 | } 99 | #doc 100 | #[inline(always)] 101 | pub const fn #name_set(&mut self, n: usize, val: #field_ty) { 102 | assert!(n < #len); 103 | let offs = #off_in_reg + #offs_expr; 104 | self.0 = (self.0 & !(#mask << offs)) | (((#to_bits) & #mask) << offs); 105 | } 106 | )); 107 | } else { 108 | items.extend(quote!( 109 | #doc 110 | #[must_use] 111 | #[inline(always)] 112 | pub 
const fn #name(&self) -> #field_ty{ 113 | let val = (self.0 >> #off_in_reg) & #mask; 114 | #from_bits 115 | } 116 | #doc 117 | #[inline(always)] 118 | pub const fn #name_set(&mut self, val: #field_ty) { 119 | self.0 = (self.0 & !(#mask << #off_in_reg)) | (((#to_bits) & #mask) << #off_in_reg); 120 | } 121 | )); 122 | } 123 | } 124 | BitOffset::Cursed(ranges) => { 125 | // offset of "range"s inside register 126 | let mut off_in_reg: Vec = Vec::new(); 127 | let mut mask: Vec = Vec::new(); 128 | // offset to shift "range" value to final value 129 | // preload first offset as 0, 130 | // since we order "range" from less to larger, first offset-in-value should always be 0. 131 | let mut off_in_val: Vec = vec![0]; 132 | for (index, range) in ranges.iter().enumerate() { 133 | off_in_reg.push(*range.start() as usize); 134 | mask.push(util::hex( 135 | 1u64.wrapping_shl(range.end() - range.start() + 1) 136 | .wrapping_sub(1), 137 | )); 138 | 139 | // prepare next "range" offset-in-value value 140 | if index < ranges.len() - 1 { 141 | off_in_val 142 | .push(off_in_val[index] + ((range.end() - range.start()) as usize + 1)) 143 | } 144 | } 145 | 146 | if let Some(array) = &f.array { 147 | let (len, offs_expr) = super::process_array(array); 148 | items.extend(quote!( 149 | #doc 150 | #[must_use] 151 | #[inline(always)] 152 | pub const fn #name(&self, n: usize) -> #field_ty{ 153 | assert!(n < #len); 154 | let mut val = 0; 155 | #( let offs = #off_in_reg + #offs_expr; 156 | val += (((self.0 >> offs) & #mask) << #off_in_val); )*; 157 | #from_bits 158 | } 159 | #doc 160 | #[inline(always)] 161 | pub const fn #name_set(&mut self, n: usize, val: #field_ty) { 162 | assert!(n < #len); 163 | #( let offs = #off_in_reg + #offs_expr; 164 | self.0 = (self.0 & !(#mask << offs)) | (((#to_bits >> #off_in_val) & #mask) << offs); )*; 165 | } 166 | )); 167 | } else { 168 | items.extend(quote!( 169 | #doc 170 | #[must_use] 171 | #[inline(always)] 172 | pub const fn #name(&self) -> #field_ty{ 173 | 
let mut val = 0; 174 | #( val += (((self.0 >> #off_in_reg) & #mask) << #off_in_val); )*; 175 | #from_bits 176 | } 177 | #doc 178 | #[inline(always)] 179 | pub const fn #name_set(&mut self, val: #field_ty) { 180 | #( self.0 = (self.0 & !(#mask << #off_in_reg)) | (((#to_bits >> #off_in_val) & #mask) << #off_in_reg); )*; 181 | } 182 | )) 183 | } 184 | } 185 | }; 186 | } 187 | 188 | let (_, name) = super::split_path(path); 189 | let name_str = name; 190 | let name = Ident::new(name, span); 191 | let doc = util::doc(&fs.description); 192 | 193 | let impl_defmt_format = opts.defmt_feature.as_ref().map(|defmt_feature| { 194 | let mut defmt_format_string = String::new(); 195 | defmt_format_string.push_str(name_str); 196 | defmt_format_string.push_str(" {{"); 197 | for (i, (field_name, field_type)) in field_names.iter().zip(&field_types).enumerate() { 198 | if i > 0 { 199 | defmt_format_string.push_str(", "); 200 | } else { 201 | defmt_format_string.push_str(" "); 202 | } 203 | defmt_format_string.push_str(field_name); 204 | 205 | if is_defmt_primitive_type(field_type) { 206 | defmt_format_string.push_str(": {="); 207 | defmt_format_string.push_str(&field_type.to_string()); 208 | defmt_format_string.push_str(":?}"); 209 | } else { 210 | defmt_format_string.push_str(": {:?}"); 211 | } 212 | } 213 | defmt_format_string.push_str(" }}"); 214 | 215 | quote! { 216 | #[cfg(feature = #defmt_feature)] 217 | impl defmt::Format for #name { 218 | fn format(&self, f: defmt::Formatter) { 219 | defmt::write!(f, #defmt_format_string, #(#field_getters),*) 220 | } 221 | } 222 | } 223 | }); 224 | 225 | let out = quote! 
{ 226 | #doc 227 | #[repr(transparent)] 228 | #[derive(Copy, Clone, Eq, PartialEq)] 229 | pub struct #name (pub #ty); 230 | 231 | impl #name { 232 | #items 233 | } 234 | 235 | impl Default for #name { 236 | #[inline(always)] 237 | fn default() -> #name { 238 | #name(0) 239 | } 240 | } 241 | 242 | impl core::fmt::Debug for #name { 243 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 244 | f.debug_struct(#name_str) 245 | #( 246 | .field(#field_names, &#field_getters) 247 | )* 248 | .finish() 249 | } 250 | } 251 | 252 | #impl_defmt_format 253 | }; 254 | 255 | Ok(out) 256 | } 257 | 258 | fn is_defmt_primitive_type(ty: &TokenStream) -> bool { 259 | // Supported by defmt but not included: [u8; N], [u8] and str. 260 | // Parsing these types is more complicated, so we skip them. 261 | // They should never occur as the field of a fieldset, 262 | // so this should not be a problem. 263 | let primitives = [ 264 | "bool", "u8", "i8", "u16", "i16", "u32", "i32", "u64", "i64", "u128", "i128", "f32", "f64", 265 | ]; 266 | primitives.as_slice().contains(&ty.to_string().as_str()) 267 | } 268 | -------------------------------------------------------------------------------- /src/generate/mod.rs: -------------------------------------------------------------------------------- 1 | mod block; 2 | mod device; 3 | mod enumm; 4 | mod fieldset; 5 | 6 | use anyhow::Result; 7 | use proc_macro2::{Ident, Span, TokenStream}; 8 | use quote::quote; 9 | use std::collections::BTreeMap; 10 | use std::str::FromStr; 11 | 12 | use crate::ir::*; 13 | 14 | pub use device::render_device_x; 15 | 16 | pub const COMMON_MODULE: &[u8] = include_bytes!("common.rs"); 17 | 18 | struct Module { 19 | items: TokenStream, 20 | children: BTreeMap, 21 | } 22 | 23 | impl Module { 24 | fn new() -> Self { 25 | Self { 26 | // Default mod contents 27 | items: quote!(), 28 | children: BTreeMap::new(), 29 | } 30 | } 31 | 32 | fn get_by_path(&mut self, path: &[&str]) -> &mut Module { 33 | if path.is_empty() 
{ 34 | return self; 35 | } 36 | 37 | self.children 38 | .entry(path[0].to_owned()) 39 | .or_insert_with(Module::new) 40 | .get_by_path(&path[1..]) 41 | } 42 | 43 | fn render(&self) -> Result { 44 | let span = Span::call_site(); 45 | 46 | let mut res = TokenStream::new(); 47 | res.extend(self.items.clone()); 48 | 49 | for (name, module) in sorted_map(&self.children, |name, _| name.clone()) { 50 | let name = Ident::new(name, span); 51 | let contents = module.render()?; 52 | res.extend(quote! { 53 | pub mod #name { 54 | #contents 55 | } 56 | }); 57 | } 58 | Ok(res) 59 | } 60 | } 61 | 62 | #[derive(Debug, Default)] 63 | pub enum CommonModule { 64 | #[default] 65 | Builtin, 66 | External(TokenStream), 67 | } 68 | 69 | /// Options for the code generator. 70 | /// 71 | /// See the individual methods for the different options you can change. 72 | #[derive(Debug)] 73 | pub struct Options { 74 | common_module: CommonModule, 75 | defmt_feature: Option, 76 | } 77 | 78 | impl Default for Options { 79 | fn default() -> Self { 80 | Self::new() 81 | } 82 | } 83 | 84 | impl Options { 85 | /// Create new options with all values set to the default. 86 | /// 87 | /// This will use a builtin common module, 88 | /// and adds `defmt` support to the generated code gated behind a `feature = "defmt"` flag. 89 | pub fn new() -> Self { 90 | Self { 91 | common_module: CommonModule::Builtin, 92 | defmt_feature: Some("defmt".into()), 93 | } 94 | } 95 | 96 | /// Get the path to the common module. 97 | fn common_path(&self) -> TokenStream { 98 | match &self.common_module { 99 | CommonModule::Builtin => TokenStream::from_str("crate::common").unwrap(), 100 | CommonModule::External(path) => path.clone(), 101 | } 102 | } 103 | 104 | /// Get the configuration of the common module. 105 | pub fn common_module(&self) -> &CommonModule { 106 | &self.common_module 107 | } 108 | 109 | /// Set the common module to use. 
110 | /// 111 | /// Specify [`CommonModule::Builtin`] for a built-in common module, 112 | /// or [`CommonModule::External`] to use an external common module. 113 | pub fn with_common_module(mut self, common_module: CommonModule) -> Self { 114 | self.common_module = common_module; 115 | self 116 | } 117 | 118 | /// Set the feature for adding defmt support in the generated code. 119 | /// 120 | /// You can fully remove `defmt` support in the generated code by specifying `None`. 121 | pub fn with_defmt_feature(mut self, defmt_feature: Option) -> Self { 122 | self.defmt_feature = defmt_feature; 123 | self 124 | } 125 | 126 | /// Get the feature flag used to enable/disable `defmt` support in the generated code. 127 | /// 128 | /// If set to `None`, no `defmt` support will be added at all to the generated code. 129 | pub fn defmt_feature(&self) -> Option<&str> { 130 | self.defmt_feature.as_deref() 131 | } 132 | } 133 | 134 | pub fn render(ir: &IR, opts: &Options) -> Result { 135 | let mut root = Module::new(); 136 | root.items = TokenStream::new(); // Remove default contents 137 | 138 | let commit_info = { 139 | let tmp = include_str!(concat!(env!("OUT_DIR"), "/commit-info.txt")); 140 | 141 | if tmp.is_empty() { 142 | " (untracked)" 143 | } else { 144 | tmp 145 | } 146 | }; 147 | 148 | let doc = format!( 149 | "Peripheral access API (generated using chiptool v{}{})", 150 | env!("CARGO_PKG_VERSION"), 151 | commit_info 152 | ); 153 | 154 | root.items.extend(quote!( 155 | #![allow(non_camel_case_types)] 156 | #![allow(non_snake_case)] 157 | #![no_std] 158 | #![doc=#doc] 159 | )); 160 | 161 | for (p, d) in sorted_map(&ir.devices, |name, _| name.clone()) { 162 | let (mods, _) = split_path(p); 163 | root.get_by_path(&mods) 164 | .items 165 | .extend(device::render(opts, ir, d, p)?); 166 | } 167 | 168 | for (p, b) in sorted_map(&ir.blocks, |name, _| name.clone()) { 169 | let (mods, _) = split_path(p); 170 | root.get_by_path(&mods) 171 | .items 172 | .extend(block::render(opts, 
ir, b, p)?); 173 | } 174 | 175 | for (p, fs) in sorted_map(&ir.fieldsets, |name, _| name.clone()) { 176 | let (mods, _) = split_path(p); 177 | root.get_by_path(&mods) 178 | .items 179 | .extend(fieldset::render(opts, ir, fs, p)?); 180 | } 181 | 182 | for (p, e) in sorted_map(&ir.enums, |name, _| name.clone()) { 183 | let (mods, _) = split_path(p); 184 | root.get_by_path(&mods) 185 | .items 186 | .extend(enumm::render(opts, ir, e, p)?); 187 | } 188 | 189 | match &opts.common_module { 190 | CommonModule::Builtin => { 191 | let tokens = 192 | TokenStream::from_str(std::str::from_utf8(COMMON_MODULE).unwrap()).unwrap(); 193 | 194 | let module = root.get_by_path(&["common"]); 195 | module.items = TokenStream::new(); // Remove default contents 196 | module.items.extend(tokens); 197 | } 198 | CommonModule::External(_) => {} 199 | } 200 | 201 | root.render() 202 | } 203 | 204 | fn split_path(s: &str) -> (Vec<&str>, &str) { 205 | let mut v: Vec<&str> = s.split("::").collect(); 206 | let n = v.pop().unwrap(); 207 | (v, n) 208 | } 209 | 210 | fn process_array(array: &Array) -> (usize, TokenStream) { 211 | match array { 212 | Array::Regular(array) => { 213 | let len = array.len as usize; 214 | let stride = array.stride as usize; 215 | let offs_expr = quote!(n*#stride); 216 | (len, offs_expr) 217 | } 218 | Array::Cursed(array) => { 219 | let len = array.offsets.len(); 220 | let offsets = array 221 | .offsets 222 | .iter() 223 | .map(|&x| x as usize) 224 | .collect::>(); 225 | let offs_expr = quote!(([#(#offsets),*][n] as usize)); 226 | (len, offs_expr) 227 | } 228 | } 229 | } 230 | 231 | fn sorted<'a, T: 'a, F, Z>( 232 | v: impl IntoIterator, 233 | by: F, 234 | ) -> impl IntoIterator 235 | where 236 | F: Fn(&T) -> Z, 237 | Z: Ord, 238 | { 239 | let mut v = v.into_iter().collect::>(); 240 | v.sort_by_key(|v| by(*v)); 241 | v 242 | } 243 | 244 | fn sorted_map<'a, K: 'a, V: 'a, F, Z>( 245 | v: impl IntoIterator, 246 | by: F, 247 | ) -> impl IntoIterator 248 | where 249 | F: Fn(&K, 
&V) -> Z, 250 | Z: Ord, 251 | { 252 | let mut v = v.into_iter().collect::>(); 253 | v.sort_by_key(|&(k, v)| by(k, v)); 254 | v 255 | } 256 | -------------------------------------------------------------------------------- /src/ir.rs: -------------------------------------------------------------------------------- 1 | use de::MapAccess; 2 | use serde::{de, de::Visitor, ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer}; 3 | use std::collections::BTreeMap; 4 | use std::fmt; 5 | use std::ops::RangeInclusive; 6 | 7 | #[derive(Default, Clone, Debug, PartialEq)] 8 | pub struct IR { 9 | pub devices: BTreeMap, 10 | pub blocks: BTreeMap, 11 | pub fieldsets: BTreeMap, 12 | pub enums: BTreeMap, 13 | } 14 | 15 | impl IR { 16 | pub fn new() -> Self { 17 | Self::default() 18 | } 19 | 20 | pub fn merge(&mut self, other: IR) { 21 | self.devices.extend(other.devices); 22 | self.blocks.extend(other.blocks); 23 | self.fieldsets.extend(other.fieldsets); 24 | self.enums.extend(other.enums); 25 | } 26 | } 27 | 28 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 29 | pub struct Device { 30 | #[serde(default, skip_serializing_if = "Option::is_none")] 31 | pub nvic_priority_bits: Option, 32 | pub peripherals: Vec, 33 | pub interrupts: Vec, 34 | } 35 | 36 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 37 | pub struct Peripheral { 38 | pub name: String, 39 | #[serde(default, skip_serializing_if = "Option::is_none")] 40 | pub description: Option, 41 | pub base_address: u64, 42 | #[serde(default, skip_serializing_if = "Option::is_none")] 43 | pub array: Option, 44 | 45 | #[serde(default, skip_serializing_if = "Option::is_none")] 46 | pub block: Option, 47 | 48 | #[serde( 49 | default, 50 | skip_serializing_if = "BTreeMap::is_empty", 51 | serialize_with = "ordered_map" 52 | )] 53 | pub interrupts: BTreeMap, 54 | } 55 | 56 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 57 | pub struct Interrupt { 58 | pub name: String, 59 | 
#[serde(default, skip_serializing_if = "Option::is_none")] 60 | pub description: Option, 61 | pub value: u32, 62 | } 63 | 64 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 65 | pub struct Block { 66 | #[serde(default, skip_serializing_if = "Option::is_none")] 67 | pub extends: Option, 68 | 69 | #[serde(default, skip_serializing_if = "Option::is_none")] 70 | pub description: Option, 71 | pub items: Vec, 72 | } 73 | 74 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 75 | pub struct BlockItem { 76 | pub name: String, 77 | #[serde(default, skip_serializing_if = "Option::is_none")] 78 | pub description: Option, 79 | 80 | #[serde(default, skip_serializing_if = "Option::is_none")] 81 | pub array: Option, 82 | pub byte_offset: u32, 83 | 84 | #[serde(flatten)] 85 | pub inner: BlockItemInner, 86 | } 87 | 88 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 89 | #[serde(untagged)] 90 | pub enum BlockItemInner { 91 | Block(BlockItemBlock), 92 | Register(Register), 93 | } 94 | 95 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 96 | #[serde(untagged)] 97 | pub enum Array { 98 | Regular(RegularArray), 99 | Cursed(CursedArray), 100 | } 101 | 102 | impl Array { 103 | /// Get the number of elements in the array. 
104 | pub fn len(&self) -> usize { 105 | match self { 106 | Self::Regular(x) => x.len as usize, 107 | Self::Cursed(x) => x.offsets.len(), 108 | } 109 | } 110 | } 111 | 112 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 113 | pub struct RegularArray { 114 | pub len: u32, 115 | pub stride: u32, 116 | } 117 | 118 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 119 | pub struct CursedArray { 120 | pub offsets: Vec, 121 | } 122 | 123 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 124 | pub struct Register { 125 | #[serde(default = "default_readwrite", skip_serializing_if = "is_readwrite")] 126 | pub access: Access, 127 | #[serde(default = "default_32", skip_serializing_if = "is_32")] 128 | pub bit_size: u32, 129 | #[serde(default, skip_serializing_if = "Option::is_none")] 130 | pub fieldset: Option, 131 | } 132 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 133 | pub struct BlockItemBlock { 134 | pub block: String, 135 | } 136 | 137 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 138 | pub enum Access { 139 | ReadWrite, 140 | Read, 141 | Write, 142 | } 143 | 144 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 145 | pub struct FieldSet { 146 | #[serde(default, skip_serializing_if = "Option::is_none")] 147 | pub extends: Option, 148 | 149 | #[serde(default, skip_serializing_if = "Option::is_none")] 150 | pub description: Option, 151 | #[serde(default = "default_32", skip_serializing_if = "is_32")] 152 | pub bit_size: u32, 153 | pub fields: Vec, 154 | } 155 | 156 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Eq)] 157 | #[serde(untagged)] 158 | pub enum BitOffset { 159 | Regular(u32), 160 | // This vector assume all RangeInclusive is non-overlapped and sorted. 161 | // It should be checked when parse source files. 
162 | Cursed(Vec>), 163 | } 164 | 165 | impl BitOffset { 166 | pub(crate) fn min_offset(&self) -> u32 { 167 | match self { 168 | BitOffset::Regular(offset) => *offset, 169 | BitOffset::Cursed(ranges) => *ranges[0].start(), 170 | } 171 | } 172 | 173 | pub(crate) fn max_offset(&self) -> u32 { 174 | match self { 175 | BitOffset::Regular(offset) => *offset, 176 | BitOffset::Cursed(ranges) => *ranges[ranges.len() - 1].end(), 177 | } 178 | } 179 | 180 | pub(crate) fn into_ranges(self, bit_size: u32) -> Vec> { 181 | match self { 182 | BitOffset::Regular(offset) => vec![offset..=offset + bit_size - 1], 183 | BitOffset::Cursed(ranges) => ranges, 184 | } 185 | } 186 | } 187 | 188 | // Custom bit offset ordering: 189 | // 1. Compare min offset: less is less, greater is greater. If min offset is equal, 190 | // 2. Compare max offset: less is less, greater is greater, equal is equal 191 | impl Ord for BitOffset { 192 | fn cmp(&self, other: &Self) -> std::cmp::Ordering { 193 | use std::cmp::Ordering; 194 | 195 | let min_order = self.min_offset().cmp(&other.min_offset()); 196 | match min_order { 197 | Ordering::Equal => self.max_offset().cmp(&other.max_offset()), 198 | min_order => min_order, 199 | } 200 | } 201 | } 202 | 203 | impl PartialOrd for BitOffset { 204 | fn partial_cmp(&self, other: &Self) -> Option { 205 | Some(self.cmp(other)) 206 | } 207 | } 208 | 209 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 210 | pub struct Field { 211 | pub name: String, 212 | #[serde(default, skip_serializing_if = "Option::is_none")] 213 | pub description: Option, 214 | pub bit_offset: BitOffset, 215 | pub bit_size: u32, 216 | #[serde(default, skip_serializing_if = "Option::is_none")] 217 | pub array: Option, 218 | #[serde(default, skip_serializing_if = "Option::is_none", rename = "enum")] 219 | pub enumm: Option, 220 | } 221 | 222 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 223 | pub struct Enum { 224 | #[serde(default, skip_serializing_if = 
"Option::is_none")] 225 | pub description: Option, 226 | pub bit_size: u32, 227 | pub variants: Vec, 228 | } 229 | 230 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 231 | pub struct EnumVariant { 232 | pub name: String, 233 | #[serde(default, skip_serializing_if = "Option::is_none")] 234 | pub description: Option, 235 | pub value: u64, 236 | } 237 | 238 | fn default_32() -> u32 { 239 | 32 240 | } 241 | fn is_32(x: &u32) -> bool { 242 | *x == 32 243 | } 244 | 245 | fn default_readwrite() -> Access { 246 | Access::ReadWrite 247 | } 248 | fn is_readwrite(x: &Access) -> bool { 249 | *x == Access::ReadWrite 250 | } 251 | 252 | #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] 253 | enum Kind { 254 | Device, 255 | Block, 256 | Fieldset, 257 | Enum, 258 | } 259 | 260 | impl Serialize for IR { 261 | fn serialize(&self, serializer: S) -> Result 262 | where 263 | S: Serializer, 264 | { 265 | // Sort by block/fieldset/enum, then alphabetically. 266 | // This ensures the output's order is deterministic. 
267 | // - Easier diffing between yamls 268 | // - No spurious changes when roundtripping 269 | let mut entries = Vec::new(); 270 | for name in self.devices.keys() { 271 | entries.push((Kind::Device, name)); 272 | } 273 | for name in self.blocks.keys() { 274 | entries.push((Kind::Block, name)); 275 | } 276 | for name in self.fieldsets.keys() { 277 | entries.push((Kind::Fieldset, name)); 278 | } 279 | for name in self.enums.keys() { 280 | entries.push((Kind::Enum, name)); 281 | } 282 | 283 | entries.sort(); 284 | 285 | let mut map = serializer.serialize_map(Some(entries.len()))?; 286 | for (kind, name) in entries { 287 | match kind { 288 | Kind::Device => { 289 | map.serialize_entry( 290 | &format!("device/{}", name), 291 | self.devices.get(name).unwrap(), 292 | )?; 293 | } 294 | Kind::Block => { 295 | map.serialize_entry( 296 | &format!("block/{}", name), 297 | self.blocks.get(name).unwrap(), 298 | )?; 299 | } 300 | Kind::Fieldset => { 301 | map.serialize_entry( 302 | &format!("fieldset/{}", name), 303 | self.fieldsets.get(name).unwrap(), 304 | )?; 305 | } 306 | Kind::Enum => { 307 | map.serialize_entry(&format!("enum/{}", name), self.enums.get(name).unwrap())?; 308 | } 309 | } 310 | } 311 | map.end() 312 | } 313 | } 314 | 315 | struct IRVisitor; 316 | 317 | impl<'de> Visitor<'de> for IRVisitor { 318 | type Value = IR; 319 | 320 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 321 | formatter.write_str("an IR") 322 | } 323 | 324 | fn visit_map(self, mut access: M) -> Result 325 | where 326 | M: MapAccess<'de>, 327 | { 328 | let mut ir = IR::new(); 329 | 330 | // While there are entries remaining in the input, add them 331 | // into our map. 332 | while let Some(key) = access.next_key()? 
{ 333 | let key: String = key; 334 | let (kind, name) = key.split_once('/').ok_or(de::Error::custom("item names must be in form `kind/name`, where kind is `block`, `device`, `fieldset` or `enum`"))?; 335 | match kind { 336 | "device" => { 337 | let val: Device = access.next_value()?; 338 | if ir.devices.insert(name.to_string(), val).is_some() { 339 | return Err(de::Error::custom(format!("Duplicate item {:?}", key))); 340 | } 341 | } 342 | "block" => { 343 | let val: Block = access.next_value()?; 344 | if ir.blocks.insert(name.to_string(), val).is_some() { 345 | return Err(de::Error::custom(format!("Duplicate item {:?}", key))); 346 | } 347 | } 348 | "fieldset" => { 349 | let val: FieldSet = access.next_value()?; 350 | if ir.fieldsets.insert(name.to_string(), val).is_some() { 351 | return Err(de::Error::custom(format!("Duplicate item {:?}", key))); 352 | } 353 | } 354 | "enum" => { 355 | let val: Enum = access.next_value()?; 356 | if ir.enums.insert(name.to_string(), val).is_some() { 357 | return Err(de::Error::custom(format!("Duplicate item {:?}", key))); 358 | } 359 | } 360 | _ => return Err(de::Error::custom(format!("Unknown kind {:?}", kind))), 361 | } 362 | } 363 | 364 | Ok(ir) 365 | } 366 | } 367 | 368 | impl<'de> Deserialize<'de> for IR { 369 | fn deserialize(deserializer: D) -> Result 370 | where 371 | D: Deserializer<'de>, 372 | { 373 | deserializer.deserialize_map(IRVisitor) 374 | } 375 | } 376 | 377 | fn ordered_map(value: &BTreeMap, serializer: S) -> Result 378 | where 379 | S: Serializer, 380 | { 381 | let ordered: BTreeMap<_, _> = value.iter().collect(); 382 | ordered.serialize(serializer) 383 | } 384 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod generate; 2 | pub mod ir; 3 | pub mod svd2ir; 4 | pub mod transform; 5 | pub mod util; 6 | pub mod validate; 7 | 
-------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #![recursion_limit = "128"] 2 | 3 | use anyhow::{bail, Context, Result}; 4 | use chiptool::{generate, svd2ir}; 5 | use clap::Parser; 6 | use log::*; 7 | use regex::Regex; 8 | use std::collections::BTreeSet; 9 | use std::fs; 10 | use std::io::Read; 11 | use std::path::PathBuf; 12 | use std::{fs::File, io::stdout}; 13 | use svd_parser::ValidateLevel; 14 | 15 | use chiptool::ir::IR; 16 | 17 | #[derive(Parser)] 18 | #[clap(version = "1.0", author = "Dirbaio ")] 19 | struct Opts { 20 | #[clap(subcommand)] 21 | subcommand: Subcommand, 22 | } 23 | 24 | #[derive(Parser)] 25 | enum Subcommand { 26 | Generate(Generate), 27 | ExtractAll(ExtractAll), 28 | ExtractPeripheral(ExtractPeripheral), 29 | Transform(Transform), 30 | Fmt(Fmt), 31 | Check(Check), 32 | GenBlock(GenBlock), 33 | } 34 | 35 | /// Extract peripheral from SVD to YAML 36 | #[derive(Parser)] 37 | struct ExtractPeripheral { 38 | /// SVD file path 39 | #[clap(long)] 40 | svd: String, 41 | /// Peripheral from the SVD 42 | #[clap(long)] 43 | peripheral: String, 44 | /// Transforms file path 45 | #[clap(long)] 46 | transform: Vec, 47 | } 48 | 49 | /// Extract all peripherals from SVD to YAML 50 | #[derive(Parser)] 51 | struct ExtractAll { 52 | /// SVD file path 53 | #[clap(long)] 54 | svd: String, 55 | /// Output directory. Each peripheral will be created as a YAML file here. 
56 | #[clap(short, long)] 57 | output: String, 58 | } 59 | 60 | /// Apply transform to YAML 61 | #[derive(Parser)] 62 | struct Transform { 63 | /// Input YAML path 64 | #[clap(short, long)] 65 | input: String, 66 | /// Output YAML path 67 | #[clap(short, long)] 68 | output: String, 69 | /// Transforms file path 70 | #[clap(short, long)] 71 | transform: String, 72 | } 73 | 74 | /// Generate a PAC directly from a SVD 75 | #[derive(Parser)] 76 | struct Generate { 77 | /// SVD file path 78 | #[clap(long)] 79 | svd: String, 80 | /// Transforms file path 81 | #[clap(long)] 82 | transform: Vec, 83 | /// Use an external `common` module. 84 | #[clap(long)] 85 | #[clap(value_name = "MODULE_PATH")] 86 | common_module: Option, 87 | /// Specify the feature name used in the generated code to conditionally enable defmt support. 88 | #[clap(long)] 89 | #[clap(value_name = "FEATURE")] 90 | #[clap(default_value = "defmt")] 91 | #[clap(conflicts_with = "no_defmt")] 92 | defmt_feature: String, 93 | /// Do not add defmt support to the generated code at all. 94 | #[clap(long)] 95 | no_defmt: bool, 96 | } 97 | 98 | /// Reformat a YAML 99 | #[derive(Parser)] 100 | struct Fmt { 101 | /// Peripheral file path 102 | files: Vec, 103 | /// Error if incorrectly formatted, instead of fixing. 104 | #[clap(long)] 105 | check: bool, 106 | /// Remove unused enums 107 | #[clap(long)] 108 | remove_unused: bool, 109 | } 110 | 111 | /// Check a YAML for errors. 
112 | #[derive(Parser)] 113 | struct Check { 114 | /// Peripheral file path 115 | files: Vec, 116 | 117 | #[clap(long)] 118 | allow_register_overlap: bool, 119 | #[clap(long)] 120 | allow_field_overlap: bool, 121 | #[clap(long)] 122 | allow_enum_dup_value: bool, 123 | #[clap(long)] 124 | allow_unused_enums: bool, 125 | #[clap(long)] 126 | allow_unused_fieldsets: bool, 127 | } 128 | 129 | /// Generate Rust code from a YAML register block 130 | #[derive(Parser)] 131 | struct GenBlock { 132 | /// Input YAML path 133 | #[clap(short, long)] 134 | input: String, 135 | /// Output YAML path 136 | #[clap(short, long)] 137 | output: String, 138 | /// Use an external `common` module. 139 | #[clap(long)] 140 | #[clap(value_name = "MODULE_PATH")] 141 | common_module: Option, 142 | } 143 | 144 | fn main() -> Result<()> { 145 | env_logger::init(); 146 | 147 | let opts: Opts = Opts::parse(); 148 | 149 | match opts.subcommand { 150 | Subcommand::ExtractPeripheral(x) => extract_peripheral(x), 151 | Subcommand::ExtractAll(x) => extract_all(x), 152 | Subcommand::Generate(x) => gen(x), 153 | Subcommand::Transform(x) => transform(x), 154 | Subcommand::Fmt(x) => fmt(x), 155 | Subcommand::Check(x) => check(x), 156 | Subcommand::GenBlock(x) => gen_block(x), 157 | } 158 | } 159 | 160 | fn load_svd(path: &str) -> Result { 161 | let xml = &mut String::new(); 162 | File::open(path) 163 | .context("Cannot open the SVD file")? 
164 | .read_to_string(xml) 165 | .context("Cannot read the SVD file")?; 166 | 167 | let config = svd_parser::Config::default() 168 | .expand_properties(true) 169 | .validate_level(ValidateLevel::Disabled); 170 | let device = svd_parser::parse_with_config(xml, &config)?; 171 | Ok(device) 172 | } 173 | 174 | fn load_config(path: &str) -> Result { 175 | let config = fs::read(path).context("Cannot read the config file")?; 176 | serde_yaml::from_slice(&config).context("cannot deserialize config") 177 | } 178 | 179 | fn extract_peripheral(args: ExtractPeripheral) -> Result<()> { 180 | let svd = load_svd(&args.svd)?; 181 | let mut ir = IR::new(); 182 | 183 | let peri = args.peripheral; 184 | let mut p = svd 185 | .peripherals 186 | .iter() 187 | .find(|p| p.name == peri) 188 | .expect("peripheral not found"); 189 | 190 | if let Some(f) = &p.derived_from { 191 | p = svd 192 | .peripherals 193 | .iter() 194 | .find(|p| p.name == *f) 195 | .expect("derivedFrom peripheral not found"); 196 | } 197 | 198 | chiptool::svd2ir::convert_peripheral(&mut ir, p)?; 199 | 200 | // Descriptions in SVD's contain a lot of noise and weird formatting. Clean them up. 201 | let description_cleanups = [ 202 | // Fix weird newline spam in descriptions. 203 | (Regex::new("[ \n]+").unwrap(), " "), 204 | // Fix weird tab and cr spam in descriptions. 205 | (Regex::new("[\r\t]+").unwrap(), " "), 206 | // Replace double-space (end of sentence) with period. 207 | ( 208 | Regex::new(r"(?.*?)[\s]{2}(?.*)").unwrap(), 209 | "$first_sentence. $next_sentence", 210 | ), 211 | // Make sure every description ends with a period. 212 | ( 213 | Regex::new(r"(?.*)(?[\s'[^\.\s']])$").unwrap(), 214 | "$full_description$last_character.", 215 | ), 216 | // Eliminate space characters between end of description and the closing period. 
217 | ( 218 | Regex::new(r"(?.*)\s\.$").unwrap(), 219 | "$full_description.", 220 | ), 221 | ]; 222 | for (re, rep) in description_cleanups.iter() { 223 | chiptool::transform::map_descriptions(&mut ir, |d| re.replace_all(d, *rep).into_owned())?; 224 | } 225 | 226 | for transform in args.transform { 227 | apply_transform(&mut ir, transform)?; 228 | } 229 | 230 | // Ensure consistent sort order in the YAML. 231 | chiptool::transform::sort::Sort {}.run(&mut ir).unwrap(); 232 | 233 | serde_yaml::to_writer(stdout(), &ir).unwrap(); 234 | Ok(()) 235 | } 236 | 237 | fn extract_all(args: ExtractAll) -> Result<()> { 238 | std::fs::create_dir_all(&args.output)?; 239 | 240 | let svd = load_svd(&args.svd)?; 241 | 242 | for p in &svd.peripherals { 243 | if p.derived_from.is_some() { 244 | continue; 245 | } 246 | 247 | let mut ir = IR::new(); 248 | chiptool::svd2ir::convert_peripheral(&mut ir, p)?; 249 | 250 | // Fix weird newline spam in descriptions. 251 | let re = Regex::new("[ \n]+").unwrap(); 252 | chiptool::transform::map_descriptions(&mut ir, |d| re.replace_all(d, " ").into_owned())?; 253 | 254 | // Ensure consistent sort order in the YAML. 255 | chiptool::transform::sort::Sort {}.run(&mut ir).unwrap(); 256 | 257 | let f = File::create(PathBuf::from(&args.output).join(format!("{}.yaml", p.name)))?; 258 | serde_yaml::to_writer(f, &ir).unwrap(); 259 | } 260 | 261 | Ok(()) 262 | } 263 | 264 | fn gen(args: Generate) -> Result<()> { 265 | let svd = load_svd(&args.svd)?; 266 | let mut ir = svd2ir::convert_svd(&svd)?; 267 | 268 | // Fix weird newline spam in descriptions. 
269 | let re = Regex::new("[ \n]+").unwrap(); 270 | chiptool::transform::map_descriptions(&mut ir, |d| re.replace_all(d, " ").into_owned())?; 271 | 272 | for transform in args.transform { 273 | apply_transform(&mut ir, transform)?; 274 | } 275 | 276 | let common_module = match args.common_module { 277 | None => generate::CommonModule::Builtin, 278 | Some(module) => generate::CommonModule::External(module.tokens()), 279 | }; 280 | let defmt_feature = match args.no_defmt { 281 | true => None, 282 | false => Some(args.defmt_feature), 283 | }; 284 | let generate_opts = generate::Options::default() 285 | .with_common_module(common_module) 286 | .with_defmt_feature(defmt_feature); 287 | let items = generate::render(&ir, &generate_opts).unwrap(); 288 | fs::write("lib.rs", items.to_string())?; 289 | 290 | let device_x = generate::render_device_x(&ir, ir.devices.values().next().unwrap())?; 291 | fs::write("device.x", device_x)?; 292 | 293 | Ok(()) 294 | } 295 | 296 | fn transform(args: Transform) -> Result<()> { 297 | let data = fs::read(&args.input)?; 298 | let mut ir: IR = serde_yaml::from_slice(&data)?; 299 | apply_transform(&mut ir, args.transform)?; 300 | 301 | let data = serde_yaml::to_string(&ir)?; 302 | fs::write(&args.output, data.as_bytes())?; 303 | 304 | Ok(()) 305 | } 306 | 307 | fn fmt(args: Fmt) -> Result<()> { 308 | for file in args.files { 309 | let got_data = fs::read(&file)?; 310 | let mut ir: IR = serde_yaml::from_slice(&got_data)?; 311 | 312 | if args.remove_unused { 313 | let mut used_enums = BTreeSet::new(); 314 | for fs in ir.fieldsets.values_mut() { 315 | for f in fs.fields.iter_mut().filter(|f| f.enumm.is_some()) { 316 | used_enums.insert(f.enumm.as_ref().unwrap().clone()); 317 | } 318 | } 319 | 320 | ir.enums.retain(|name, _| used_enums.contains(name)); 321 | } 322 | 323 | // Ensure consistent sort order in the YAML. 
324 | chiptool::transform::sort::Sort {}.run(&mut ir).unwrap(); 325 | 326 | // Trim all descriptions 327 | 328 | let cleanup = |s: &mut Option| { 329 | if let Some(s) = s.as_mut() { 330 | *s = s.trim().to_string() 331 | } 332 | }; 333 | 334 | for b in ir.blocks.values_mut() { 335 | cleanup(&mut b.description); 336 | for i in b.items.iter_mut() { 337 | cleanup(&mut i.description); 338 | } 339 | } 340 | 341 | for b in ir.fieldsets.values_mut() { 342 | cleanup(&mut b.description); 343 | for i in b.fields.iter_mut() { 344 | cleanup(&mut i.description); 345 | } 346 | } 347 | 348 | for b in ir.enums.values_mut() { 349 | cleanup(&mut b.description); 350 | for i in b.variants.iter_mut() { 351 | cleanup(&mut i.description); 352 | } 353 | } 354 | 355 | let want_data = serde_yaml::to_string(&ir)?; 356 | 357 | if got_data != want_data.as_bytes() { 358 | if args.check { 359 | bail!("File {} is not correctly formatted", &file); 360 | } else { 361 | fs::write(&file, want_data)?; 362 | } 363 | } 364 | } 365 | Ok(()) 366 | } 367 | 368 | fn check(args: Check) -> Result<()> { 369 | let opts = chiptool::validate::Options { 370 | allow_enum_dup_value: args.allow_enum_dup_value, 371 | allow_field_overlap: args.allow_field_overlap, 372 | allow_register_overlap: args.allow_register_overlap, 373 | allow_unused_enums: args.allow_unused_enums, 374 | allow_unused_fieldsets: args.allow_unused_fieldsets, 375 | }; 376 | 377 | let mut fails = 0; 378 | 379 | for file in args.files { 380 | let got_data = fs::read(&file)?; 381 | let ir: IR = serde_yaml::from_slice(&got_data)?; 382 | let errs = chiptool::validate::validate(&ir, opts.clone()); 383 | fails += errs.len(); 384 | for e in errs { 385 | println!("{}: {}", &file, e); 386 | } 387 | } 388 | 389 | if fails != 0 { 390 | bail!("{} failures", fails) 391 | } 392 | 393 | Ok(()) 394 | } 395 | 396 | fn gen_block(args: GenBlock) -> Result<()> { 397 | let data = fs::read(&args.input)?; 398 | let mut ir: IR = serde_yaml::from_slice(&data)?; 399 | 400 | 
chiptool::transform::sanitize::Sanitize {} 401 | .run(&mut ir) 402 | .unwrap(); 403 | 404 | // Ensure consistent sort order in the YAML. 405 | chiptool::transform::sort::Sort {}.run(&mut ir).unwrap(); 406 | 407 | let common_module = match args.common_module { 408 | None => generate::CommonModule::Builtin, 409 | Some(module) => generate::CommonModule::External(module.tokens()), 410 | }; 411 | let generate_opts = generate::Options::default().with_common_module(common_module); 412 | let items = generate::render(&ir, &generate_opts).unwrap(); 413 | fs::write(&args.output, items.to_string())?; 414 | 415 | Ok(()) 416 | } 417 | #[derive(Default, serde::Serialize, serde::Deserialize)] 418 | struct Config { 419 | #[serde(default)] 420 | includes: Vec, 421 | #[serde(default)] 422 | transforms: Vec, 423 | } 424 | 425 | fn apply_transform>(ir: &mut IR, p: P) -> anyhow::Result<()> { 426 | info!("applying transform {}", p.as_ref().display()); 427 | let config = load_config(p.as_ref().to_str().unwrap())?; 428 | 429 | for include in &config.includes { 430 | let subp = p.as_ref().parent().unwrap().join(include); 431 | apply_transform(ir, subp)?; 432 | } 433 | for transform in &config.transforms { 434 | info!("running {:?}", transform); 435 | transform.run(ir)?; 436 | } 437 | 438 | Ok(()) 439 | } 440 | 441 | /// Struct holding a valid module path as a string. 442 | /// 443 | /// Implements `FromStr` so it can be used directly as command line argument. 444 | #[derive(Clone)] 445 | struct ModulePath { 446 | path: String, 447 | } 448 | 449 | impl ModulePath { 450 | /// Get the module path as a TokenStream. 
451 | fn tokens(&self) -> proc_macro2::TokenStream { 452 | self.path.parse().unwrap() 453 | } 454 | } 455 | 456 | impl std::str::FromStr for ModulePath { 457 | type Err = anyhow::Error; 458 | 459 | fn from_str(data: &str) -> Result { 460 | data.parse::() 461 | .map_err(|e| anyhow::anyhow!("{e}"))?; 462 | 463 | for (i, component) in data.split("::").enumerate() { 464 | if component.is_empty() && i != 0 { 465 | anyhow::bail!("path components can not be empty") 466 | } 467 | for (i, c) in component.chars().enumerate() { 468 | if c.is_alphabetic() || c == '_' { 469 | continue; 470 | } 471 | if i > 0 && c.is_alphanumeric() { 472 | continue; 473 | } 474 | anyhow::bail!("path components may only consist of letters, digits and underscore") 475 | } 476 | } 477 | 478 | Ok(Self { path: data.into() }) 479 | } 480 | } 481 | -------------------------------------------------------------------------------- /src/svd2ir.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use std::collections::{BTreeMap, BTreeSet}; 3 | use svd_parser::svd; 4 | 5 | use crate::util; 6 | use crate::{ir::*, transform}; 7 | 8 | #[derive(Debug)] 9 | struct ProtoBlock { 10 | name: Vec, 11 | description: Option, 12 | registers: Vec, 13 | } 14 | 15 | #[derive(Debug)] 16 | struct ProtoFieldset { 17 | name: Vec, 18 | description: Option, 19 | bit_size: u32, 20 | fields: Vec, 21 | } 22 | 23 | #[derive(Debug)] 24 | struct ProtoEnum { 25 | name: Vec, 26 | bit_size: u32, 27 | variants: Vec, 28 | } 29 | 30 | pub fn convert_peripheral(ir: &mut IR, p: &svd::Peripheral) -> anyhow::Result<()> { 31 | let mut blocks = Vec::new(); 32 | let pname = p.header_struct_name.clone().unwrap_or(p.name.clone()); 33 | collect_blocks( 34 | &mut blocks, 35 | vec![pname], 36 | p.description.clone(), 37 | p.registers.as_deref().unwrap_or(&[]), 38 | ); 39 | 40 | let enum_from_name = enum_map(&blocks); 41 | let mut fieldsets: Vec = Vec::new(); 42 | let mut enums: Vec = Vec::new(); 43 | 
44 | for block in &blocks { 45 | for r in &block.registers { 46 | if let svd::RegisterCluster::Register(r) = r { 47 | if r.derived_from.is_some() { 48 | continue; 49 | } 50 | 51 | if let Some(fields) = &r.fields { 52 | let mut fieldset_name = block.name.clone(); 53 | fieldset_name.push(util::replace_suffix(&r.name, "")); 54 | fieldsets.push(ProtoFieldset { 55 | name: fieldset_name.clone(), 56 | description: r.description.clone(), 57 | bit_size: r.properties.size.unwrap_or(32), 58 | fields: fields.clone(), 59 | }); 60 | 61 | for f in fields { 62 | if f.derived_from.is_some() { 63 | continue; 64 | } 65 | 66 | let mut enum_read = None; 67 | let mut enum_write = None; 68 | let mut enum_readwrite = None; 69 | 70 | let field_name = util::replace_suffix(&f.name, ""); 71 | 72 | for e in &f.enumerated_values { 73 | let e = if let Some(derived_from) = &e.derived_from { 74 | let Some(e) = enum_from_name.get(derived_from.as_str()) else { 75 | warn!( 76 | "unknown enum to derive from ({} -> {})", 77 | field_name, derived_from 78 | ); 79 | continue; 80 | }; 81 | e 82 | } else { 83 | e 84 | }; 85 | 86 | let usage = e.usage.unwrap_or(svd::Usage::ReadWrite); 87 | let target = match usage { 88 | svd::Usage::Read => &mut enum_read, 89 | svd::Usage::Write => &mut enum_write, 90 | svd::Usage::ReadWrite => &mut enum_readwrite, 91 | }; 92 | 93 | if target.is_some() { 94 | warn!("ignoring enum with dup usage {:?}", usage); 95 | continue; 96 | } 97 | 98 | *target = Some(e) 99 | } 100 | 101 | enum EnumSet<'a> { 102 | Single(&'a svd::EnumeratedValues), 103 | ReadWrite(&'a svd::EnumeratedValues, &'a svd::EnumeratedValues), 104 | } 105 | 106 | let set = match (enum_read, enum_write, enum_readwrite) { 107 | (None, None, None) => None, 108 | (Some(e), None, None) => Some(EnumSet::Single(e)), 109 | (None, Some(e), None) => Some(EnumSet::Single(e)), 110 | (None, None, Some(e)) => Some(EnumSet::Single(e)), 111 | (Some(r), Some(w), None) => Some(EnumSet::ReadWrite(r, w)), 112 | (Some(r), None, 
Some(w)) => Some(EnumSet::ReadWrite(r, w)), 113 | (None, Some(w), Some(r)) => Some(EnumSet::ReadWrite(r, w)), 114 | (Some(_), Some(_), Some(_)) => panic!( 115 | "cannot have enumeratedvalues for read, write and readwrite!" 116 | ), 117 | }; 118 | 119 | if let Some(set) = set { 120 | let variants = match set { 121 | EnumSet::Single(e) => e.values.clone(), 122 | EnumSet::ReadWrite(r, w) => { 123 | let r_values = r.values.iter().map(|v| v.value.unwrap()); 124 | let w_values = w.values.iter().map(|v| v.value.unwrap()); 125 | let values: BTreeSet<_> = r_values.chain(w_values).collect(); 126 | let mut values: Vec<_> = values.iter().collect(); 127 | values.sort(); 128 | 129 | let r_values: BTreeMap<_, _> = 130 | r.values.iter().map(|v| (v.value.unwrap(), v)).collect(); 131 | let w_values: BTreeMap<_, _> = 132 | w.values.iter().map(|v| (v.value.unwrap(), v)).collect(); 133 | 134 | values 135 | .into_iter() 136 | .map(|&v| match (r_values.get(&v), w_values.get(&v)) { 137 | (None, None) => unreachable!(), 138 | (Some(&r), None) => r.clone(), 139 | (None, Some(&w)) => w.clone(), 140 | (Some(&r), Some(&w)) => { 141 | let mut m = r.clone(); 142 | if r.name != w.name { 143 | m.name = format!("R_{}_W_{}", r.name, w.name); 144 | } 145 | m 146 | } 147 | }) 148 | .collect() 149 | } 150 | }; 151 | 152 | let mut name = fieldset_name.clone(); 153 | name.push(field_name); 154 | enums.push(ProtoEnum { 155 | name, 156 | bit_size: f.bit_range.width, 157 | variants, 158 | }); 159 | } 160 | } 161 | }; 162 | } 163 | } 164 | } 165 | 166 | // Make all collected names unique by prefixing with parents' names if needed. 
167 | let block_names = unique_names(blocks.iter().map(|x| x.name.clone()).collect()); 168 | let fieldset_names = unique_names(fieldsets.iter().map(|x| x.name.clone()).collect()); 169 | let enum_names = unique_names(enums.iter().map(|x| x.name.clone()).collect()); 170 | 171 | // Convert blocks 172 | for proto in &blocks { 173 | let mut block = Block { 174 | extends: None, 175 | description: proto.description.clone(), 176 | items: Vec::new(), 177 | }; 178 | 179 | for r in &proto.registers { 180 | match r { 181 | svd::RegisterCluster::Register(r) => { 182 | if r.derived_from.is_some() { 183 | warn!("unsupported derived_from in registers"); 184 | continue; 185 | } 186 | 187 | let fieldset_name = if r.fields.is_some() { 188 | let mut fieldset_name = proto.name.clone(); 189 | fieldset_name.push(util::replace_suffix(&r.name, "")); 190 | Some(fieldset_names.get(&fieldset_name).unwrap().clone()) 191 | } else { 192 | None 193 | }; 194 | 195 | let array = if let svd::Register::Array(_, dim) = r { 196 | Some(Array::Regular(RegularArray { 197 | len: dim.dim, 198 | stride: dim.dim_increment, 199 | })) 200 | } else { 201 | None 202 | }; 203 | 204 | let access = match r.properties.access { 205 | None => Access::ReadWrite, 206 | Some(svd::Access::ReadOnly) => Access::Read, 207 | Some(svd::Access::WriteOnly) => Access::Write, 208 | Some(svd::Access::WriteOnce) => Access::Write, 209 | Some(svd::Access::ReadWrite) => Access::ReadWrite, 210 | Some(svd::Access::ReadWriteOnce) => Access::ReadWrite, 211 | }; 212 | 213 | let block_item = BlockItem { 214 | name: util::replace_suffix(&r.name, ""), 215 | description: r.description.clone(), 216 | array, 217 | byte_offset: r.address_offset, 218 | inner: BlockItemInner::Register(Register { 219 | access, // todo 220 | bit_size: r.properties.size.unwrap_or(32), 221 | fieldset: fieldset_name.clone(), 222 | }), 223 | }; 224 | 225 | block.items.push(block_item) 226 | } 227 | svd::RegisterCluster::Cluster(c) => { 228 | if c.derived_from.is_some() { 
229 | warn!("unsupported derived_from in clusters"); 230 | continue; 231 | } 232 | 233 | let cname = util::replace_suffix(&c.name, ""); 234 | 235 | let array = if let svd::Cluster::Array(_, dim) = c { 236 | Some(Array::Regular(RegularArray { 237 | len: dim.dim, 238 | stride: dim.dim_increment, 239 | })) 240 | } else { 241 | None 242 | }; 243 | 244 | let mut block_name = proto.name.clone(); 245 | block_name.push(util::replace_suffix(&c.name, "")); 246 | let block_name = block_names.get(&block_name).unwrap().clone(); 247 | 248 | block.items.push(BlockItem { 249 | name: cname.clone(), 250 | description: c.description.clone(), 251 | array, 252 | byte_offset: c.address_offset, 253 | inner: BlockItemInner::Block(BlockItemBlock { block: block_name }), 254 | }); 255 | } 256 | } 257 | } 258 | 259 | let block_name = block_names.get(&proto.name).unwrap().clone(); 260 | assert!(ir.blocks.insert(block_name, block).is_none()) 261 | } 262 | 263 | // Convert fieldsets 264 | for proto in &fieldsets { 265 | let mut fieldset = FieldSet { 266 | extends: None, 267 | description: proto.description.clone(), 268 | bit_size: proto.bit_size, 269 | fields: Vec::new(), 270 | }; 271 | 272 | for f in &proto.fields { 273 | if f.derived_from.is_some() { 274 | warn!("unsupported derived_from in fieldset"); 275 | } 276 | 277 | let array = if let svd::Field::Array(_, dim) = f { 278 | Some(Array::Regular(RegularArray { 279 | len: dim.dim, 280 | stride: dim.dim_increment, 281 | })) 282 | } else { 283 | None 284 | }; 285 | 286 | let field_name = util::replace_suffix(&f.name, ""); 287 | 288 | let mut field = Field { 289 | name: field_name.clone(), 290 | description: f.description.clone(), 291 | bit_offset: BitOffset::Regular(f.bit_range.offset), 292 | bit_size: f.bit_range.width, 293 | array, 294 | enumm: None, 295 | }; 296 | 297 | if !f.enumerated_values.is_empty() { 298 | let mut enum_name = proto.name.clone(); 299 | enum_name.push(field_name); 300 | 301 | trace!("finding enum {:?}", enum_name); 302 | 
let enum_name = enum_names.get(&enum_name).unwrap().clone(); 303 | trace!("found {:?}", enum_name); 304 | field.enumm = Some(enum_name.clone()) 305 | } 306 | 307 | fieldset.fields.push(field) 308 | } 309 | 310 | let fieldset_name = fieldset_names.get(&proto.name).unwrap().clone(); 311 | assert!(ir.fieldsets.insert(fieldset_name, fieldset).is_none()) 312 | } 313 | 314 | for proto in &enums { 315 | let variants = proto 316 | .variants 317 | .iter() 318 | .map(|v| EnumVariant { 319 | description: v.description.clone(), 320 | name: v.name.clone(), 321 | value: v.value.unwrap() as _, // TODO what are variants without values used for?? 322 | }) 323 | .collect(); 324 | 325 | let enumm = Enum { 326 | description: None, 327 | bit_size: proto.bit_size, 328 | variants, 329 | }; 330 | 331 | let enum_name = enum_names.get(&proto.name).unwrap().clone(); 332 | assert!(ir.enums.insert(enum_name.clone(), enumm).is_none()); 333 | } 334 | 335 | Ok(()) 336 | } 337 | 338 | pub fn convert_svd(svd: &svd::Device) -> anyhow::Result { 339 | let mut ir = IR::new(); 340 | 341 | let mut device = Device { 342 | nvic_priority_bits: svd.cpu.as_ref().map(|cpu| cpu.nvic_priority_bits as u8), 343 | peripherals: vec![], 344 | interrupts: vec![], 345 | }; 346 | 347 | for p in &svd.peripherals { 348 | let base_p = if let Some(derived) = &p.derived_from { 349 | svd.peripherals.iter().find(|p| p.name == *derived).unwrap() 350 | } else { 351 | p 352 | }; 353 | let block_name = base_p 354 | .header_struct_name 355 | .clone() 356 | .unwrap_or(base_p.name.clone()); 357 | let block_path = format!("{}::{}", block_name, block_name); 358 | let peri_name = p.name.to_ascii_uppercase(); 359 | 360 | let peri = Peripheral { 361 | name: peri_name.clone(), 362 | description: p.description.clone(), 363 | base_address: p.base_address, 364 | block: Some(block_path), 365 | array: None, 366 | interrupts: BTreeMap::new(), 367 | }; 368 | 369 | let mut irqs: Vec<&svd::Interrupt> = vec![]; 370 | for i in &p.interrupt { 371 | if 
!irqs.iter().any(|&j| j.name == i.name) { 372 | irqs.push(i) 373 | } 374 | } 375 | irqs.sort_by_key(|i| &i.name); 376 | 377 | for &i in irqs.iter() { 378 | let iname = i.name.to_ascii_uppercase(); 379 | 380 | if !device.interrupts.iter().any(|j| j.name == iname) { 381 | device.interrupts.push(Interrupt { 382 | name: iname.clone(), 383 | description: i.description.clone(), 384 | value: i.value, 385 | }); 386 | } 387 | 388 | /* 389 | let name = if iname.len() > periname.len() && iname.starts_with(&periname) { 390 | let s = iname.strip_prefix(&periname).unwrap(); 391 | s.trim_matches('_').to_string() 392 | } else if irqs.len() == 1 { 393 | "IRQ".to_string() 394 | } else { 395 | format!("IRQ{}", n) 396 | }; 397 | 398 | peri.interrupts.insert(name, iname.clone()); 399 | */ 400 | } 401 | 402 | device.peripherals.push(peri); 403 | 404 | if p.derived_from.is_none() { 405 | let mut pir = IR::new(); 406 | convert_peripheral(&mut pir, p)?; 407 | 408 | transform::map_names(&mut pir, |k, s| match k { 409 | transform::NameKind::Block => *s = format!("{}::{}", block_name, s), 410 | transform::NameKind::Fieldset => *s = format!("{}::regs::{}", block_name, s), 411 | transform::NameKind::Enum => *s = format!("{}::vals::{}", block_name, s), 412 | _ => {} 413 | }); 414 | 415 | ir.merge(pir); 416 | } 417 | } 418 | 419 | ir.devices.insert("".to_string(), device); 420 | 421 | transform::sort::Sort {}.run(&mut ir).unwrap(); 422 | transform::sanitize::Sanitize {}.run(&mut ir).unwrap(); 423 | 424 | Ok(ir) 425 | } 426 | 427 | /// Create a map of all enums by name. 428 | /// Ignores potential duplicates of names. 
/// Shorten each name path to its shortest suffix that is unique among all
/// given names, returning a map from the original path to the shortened
/// `_`-joined string.
///
/// "PSEL" segments are first glued onto the following segment (e.g.
/// `["UART", "PSEL", "RXD"]` becomes `["UART", "PSEL_RXD"]`) so the PSEL
/// prefix is never dropped on its own. For names that are exact duplicates,
/// the fallback picks the shortest suffix not already handed out.
fn unique_names(names: Vec<Vec<String>>) -> BTreeMap<Vec<String>, String> {
    // Pre-process: merge "PSEL" (and chains of it) into the next segment.
    let glued: Vec<Vec<String>> = names
        .iter()
        .map(|segments| {
            let mut out = Vec::new();
            let mut pending = String::new();
            for seg in segments {
                if seg == "PSEL" {
                    if !pending.is_empty() {
                        pending.push('_');
                    }
                    pending.push_str(seg);
                } else if pending.is_empty() {
                    out.push(seg.clone());
                } else {
                    out.push(format!("{pending}_{seg}"));
                    pending = String::new()
                }
            }
            // A trailing "PSEL" run becomes its own final segment.
            if !pending.is_empty() {
                out.push(pending);
            }
            out
        })
        .collect();

    let mut result = BTreeMap::new();
    let mut taken = BTreeSet::new();

    // Does `suffix` also terminate any *other* glued name?
    let clashes = |suffix: &[String], me: usize| {
        glued
            .iter()
            .enumerate()
            .any(|(other, cand)| other != me && cand.ends_with(suffix))
    };

    for (idx, cand) in glued.iter().enumerate() {
        // Longest cut (shortest suffix) that no other name shares; for exact
        // duplicates fall back to the shortest suffix not yet handed out.
        let cut = (0..cand.len())
            .rev()
            .find(|&k| !clashes(&cand[k..], idx))
            .or_else(|| (0..cand.len()).rev().find(|&k| !taken.contains(&cand[k..])))
            .unwrap();
        assert!(result
            .insert(names[idx].clone(), cand[cut..].join("_"))
            .is_none());
        taken.insert(&cand[cut..]);
    }
    result
}
Serialize, Deserialize)] 7 | pub struct AddFields { 8 | pub fieldset: RegexSet, 9 | pub fields: Vec, 10 | } 11 | 12 | impl AddFields { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for id in match_all(ir.fieldsets.keys().cloned(), &self.fieldset) { 15 | let d = ir.fieldsets.get_mut(&id).unwrap(); 16 | d.fields.extend(self.fields.clone()); 17 | } 18 | 19 | Ok(()) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/transform/add_interrupts.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct AddInterrupts { 8 | pub devices: RegexSet, 9 | pub interrupts: Vec, 10 | } 11 | 12 | impl AddInterrupts { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for id in match_all(ir.devices.keys().cloned(), &self.devices) { 15 | let d = ir.devices.get_mut(&id).unwrap(); 16 | d.interrupts.extend(self.interrupts.clone()); 17 | } 18 | Ok(()) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/transform/add_registers.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct AddRegisters { 8 | pub block: RegexSet, 9 | pub registers: Vec, 10 | } 11 | 12 | impl AddRegisters { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for id in match_all(ir.blocks.keys().cloned(), &self.block) { 15 | let d = ir.blocks.get_mut(&id).unwrap(); 16 | d.items.extend(self.registers.clone()); 17 | } 18 | 19 | Ok(()) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/transform/common.rs: 
-------------------------------------------------------------------------------- 1 | use anyhow::bail; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::{BTreeMap, BTreeSet}; 4 | 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Clone)] 8 | pub struct RegexSet { 9 | include: Vec, 10 | exclude: Vec, 11 | } 12 | 13 | impl RegexSet { 14 | pub fn captures<'h>(&self, haystack: &'h str) -> Option> { 15 | for r in &self.exclude { 16 | if r.is_match(haystack) { 17 | return None; 18 | } 19 | } 20 | for r in &self.include { 21 | if let Some(c) = r.captures(haystack) { 22 | return Some(c); 23 | } 24 | } 25 | None 26 | } 27 | 28 | pub fn is_match(&self, haystack: &str) -> bool { 29 | for r in &self.exclude { 30 | if r.is_match(haystack) { 31 | return false; 32 | } 33 | } 34 | for r in &self.include { 35 | if r.is_match(haystack) { 36 | return true; 37 | } 38 | } 39 | false 40 | } 41 | } 42 | 43 | impl<'de> Deserialize<'de> for RegexSet { 44 | fn deserialize(de: D) -> Result 45 | where 46 | D: serde::Deserializer<'de>, 47 | { 48 | fn make_regex(r: &str) -> Result { 49 | regex::Regex::new(&format!("^{}$", r)) 50 | } 51 | 52 | #[derive(Deserialize)] 53 | #[serde(untagged)] 54 | enum VecOrString { 55 | One(String), 56 | Many(Vec), 57 | } 58 | impl VecOrString { 59 | fn regexes(self) -> Vec { 60 | let strs = match self { 61 | VecOrString::Many(s) => s, 62 | VecOrString::One(s) => vec![s], 63 | }; 64 | strs.into_iter().map(|s| make_regex(&s).unwrap()).collect() 65 | } 66 | } 67 | 68 | impl Default for VecOrString { 69 | fn default() -> Self { 70 | Self::Many(vec![]) 71 | } 72 | } 73 | 74 | #[derive(Deserialize)] 75 | #[serde(untagged)] 76 | enum Inner { 77 | String(String), 78 | Complex { 79 | include: VecOrString, 80 | #[serde(default)] 81 | exclude: VecOrString, 82 | }, 83 | } 84 | 85 | let x = Inner::deserialize(de)?; 86 | match x { 87 | Inner::String(s) => Ok(RegexSet { 88 | include: vec![make_regex(&s).unwrap()], 89 | exclude: vec![], 90 | }), 91 | Inner::Complex { 
include, exclude } => Ok(RegexSet { 92 | include: include.regexes(), 93 | exclude: exclude.regexes(), 94 | }), 95 | } 96 | } 97 | } 98 | 99 | impl Serialize for RegexSet { 100 | fn serialize(&self, _: S) -> Result 101 | where 102 | S: serde::Serializer, 103 | { 104 | todo!() 105 | } 106 | } 107 | 108 | #[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Serialize, Deserialize)] 109 | pub enum CheckLevel { 110 | NoCheck, 111 | Layout, 112 | Names, 113 | Descriptions, 114 | } 115 | 116 | impl Default for CheckLevel { 117 | fn default() -> Self { 118 | Self::Names 119 | } 120 | } 121 | 122 | pub(crate) fn check_mergeable_fieldsets( 123 | a_name: &str, 124 | a: &FieldSet, 125 | b_name: &str, 126 | b: &FieldSet, 127 | level: CheckLevel, 128 | ) -> anyhow::Result<()> { 129 | if let Err(e) = check_mergeable_fieldsets_inner(a, b, level) { 130 | bail!( 131 | "Cannot merge fieldsets.\nfirst: {} {:#?}\nsecond: {} {:#?}\ncause: {:?}", 132 | a_name, 133 | a, 134 | b_name, 135 | b, 136 | e 137 | ) 138 | } 139 | Ok(()) 140 | } 141 | 142 | pub(crate) fn mergeable_fields(a: &Field, b: &Field, level: CheckLevel) -> bool { 143 | let mut res = true; 144 | if level >= CheckLevel::Layout { 145 | res &= a.bit_size == b.bit_size 146 | && a.bit_offset == b.bit_offset 147 | && a.enumm == b.enumm 148 | && a.array == b.array; 149 | } 150 | if level >= CheckLevel::Names { 151 | res &= a.name == b.name; 152 | } 153 | if level >= CheckLevel::Descriptions { 154 | res &= a.description == b.description; 155 | } 156 | res 157 | } 158 | 159 | pub(crate) fn check_mergeable_fieldsets_inner( 160 | a: &FieldSet, 161 | b: &FieldSet, 162 | level: CheckLevel, 163 | ) -> anyhow::Result<()> { 164 | if a.bit_size != b.bit_size { 165 | bail!("Different bit size: {} vs {}", a.bit_size, b.bit_size) 166 | } 167 | 168 | if level >= CheckLevel::Layout { 169 | if a.fields.len() != b.fields.len() { 170 | bail!("Different field count") 171 | } 172 | 173 | let mut aok = [false; 128]; 174 | let mut bok = [false; 
128]; 175 | 176 | for (ia, fa) in a.fields.iter().enumerate() { 177 | if let Some((ib, _fb)) = b 178 | .fields 179 | .iter() 180 | .enumerate() 181 | .find(|(ib, fb)| !bok[*ib] && mergeable_fields(fa, fb, level)) 182 | { 183 | aok[ia] = true; 184 | bok[ib] = true; 185 | } else { 186 | bail!("Field in first fieldset has no match: {:?}", fa); 187 | } 188 | } 189 | } 190 | 191 | Ok(()) 192 | } 193 | 194 | pub(crate) fn match_all(set: impl Iterator, re: &RegexSet) -> BTreeSet { 195 | let mut ids: BTreeSet = BTreeSet::new(); 196 | for id in set { 197 | if re.is_match(&id) { 198 | ids.insert(id); 199 | } 200 | } 201 | ids 202 | } 203 | 204 | pub(crate) fn match_groups( 205 | set: impl Iterator, 206 | re: &RegexSet, 207 | to: &str, 208 | ) -> BTreeMap> { 209 | let mut groups: BTreeMap> = BTreeMap::new(); 210 | for s in set { 211 | if let Some(to) = match_expand(&s, re, to) { 212 | if let Some(v) = groups.get_mut(&to) { 213 | v.insert(s); 214 | } else { 215 | let mut v = BTreeSet::new(); 216 | v.insert(s); 217 | groups.insert(to, v); 218 | } 219 | } 220 | } 221 | groups 222 | } 223 | 224 | pub(crate) fn match_expand(s: &str, regex: &RegexSet, res: &str) -> Option { 225 | let m = regex.captures(s)?; 226 | let mut dst = String::new(); 227 | m.expand(res, &mut dst); 228 | Some(dst) 229 | } 230 | 231 | pub(crate) fn replace_enum_ids(ir: &mut IR, from: &BTreeSet, to: String) { 232 | for (_, fs) in ir.fieldsets.iter_mut() { 233 | for f in fs.fields.iter_mut() { 234 | if let Some(id) = &mut f.enumm { 235 | if from.contains(id) { 236 | *id = to.clone() 237 | } 238 | } 239 | } 240 | } 241 | } 242 | 243 | pub(crate) fn replace_fieldset_ids(ir: &mut IR, from: &BTreeSet, to: String) { 244 | for (_, b) in ir.blocks.iter_mut() { 245 | for i in b.items.iter_mut() { 246 | if let BlockItemInner::Register(r) = &mut i.inner { 247 | if let Some(id) = &r.fieldset { 248 | if from.contains(id) { 249 | r.fieldset = Some(to.clone()) 250 | } 251 | } 252 | } 253 | } 254 | } 255 | } 256 | 257 | 
pub(crate) fn replace_block_ids(ir: &mut IR, from: &BTreeSet, to: String) { 258 | for (_, d) in ir.devices.iter_mut() { 259 | for p in d.peripherals.iter_mut() { 260 | if let Some(block) = &mut p.block { 261 | if from.contains(block) { 262 | *block = to.clone() 263 | } 264 | } 265 | } 266 | } 267 | 268 | for (_, b) in ir.blocks.iter_mut() { 269 | for i in b.items.iter_mut() { 270 | if let BlockItemInner::Block(bi) = &mut i.inner { 271 | if from.contains(&bi.block) { 272 | bi.block = to.clone() 273 | } 274 | } 275 | } 276 | } 277 | } 278 | 279 | #[derive(Serialize, Deserialize, Debug, Copy, Clone, Eq, PartialEq, Default)] 280 | pub enum ArrayMode { 281 | #[default] 282 | Standard, 283 | Cursed, 284 | Holey, 285 | } 286 | 287 | pub(crate) fn calc_array(mut offsets: Vec, mode: ArrayMode) -> anyhow::Result<(u32, Array)> { 288 | offsets.sort_unstable(); 289 | 290 | // Guess stride. 291 | let start_offset = offsets[0]; 292 | let len = offsets.len() as u32; 293 | let stride = if len == 1 { 294 | // If there's only 1 item, we can't know the stride, but it 295 | // doesn't really matter! 296 | 0 297 | } else { 298 | offsets[1] - offsets[0] 299 | }; 300 | 301 | // Check the stride guess is OK 302 | 303 | if offsets 304 | .iter() 305 | .enumerate() 306 | .all(|(n, &i)| i == start_offset + (n as u32) * stride) 307 | { 308 | // Array is regular, 309 | return Ok(( 310 | start_offset, 311 | Array::Regular(RegularArray { 312 | len: offsets.len() as _, 313 | stride, 314 | }), 315 | )); 316 | } 317 | 318 | // Array is irregular, If we wanted a regular array, fail. 319 | match mode { 320 | ArrayMode::Standard => { 321 | bail!("arrayize: items are not evenly spaced. 
Set `mode: Cursed` to allow index->offset relation to be non-linear, or `mode: Holey` to keep it linear but fill the holes with indexes that won't be valid.") 322 | } 323 | ArrayMode::Cursed => { 324 | for o in &mut offsets { 325 | *o -= start_offset 326 | } 327 | Ok((start_offset, Array::Cursed(CursedArray { offsets }))) 328 | } 329 | ArrayMode::Holey => { 330 | let len = (offsets.last().unwrap() - offsets.first().unwrap()) / stride + 1; 331 | Ok((start_offset, Array::Regular(RegularArray { len, stride }))) 332 | } 333 | } 334 | } 335 | 336 | // filter enum by enum name, then copy variant description 337 | pub(crate) fn extract_variant_desc( 338 | ir: &IR, 339 | enum_names: &RegexSet, 340 | bit_size: Option, 341 | ) -> anyhow::Result> { 342 | let mut enum_desc_pair: BTreeMap = BTreeMap::new(); 343 | for (e_name, e_struct) in ir.enums.iter().filter(|(e_name, e_struct)| { 344 | bit_size.map_or(true, |s| s == e_struct.bit_size) && enum_names.is_match(e_name) 345 | }) { 346 | let variant_desc_str = e_struct.variants.iter().fold(String::new(), |mut acc, v| { 347 | acc.push_str( 348 | format!( 349 | "{}: {}\n", 350 | v.value, 351 | v.description.clone().unwrap_or_default() 352 | ) 353 | .as_str(), 354 | ); 355 | acc 356 | }); 357 | 358 | enum_desc_pair.insert(e_name.clone(), variant_desc_str); 359 | } 360 | 361 | Ok(enum_desc_pair) 362 | } 363 | 364 | // filter field by enum name, then append corresponding variant description 365 | pub(crate) fn append_variant_desc_to_field( 366 | ir: &mut IR, 367 | enum_desc_pair: &BTreeMap, 368 | bit_size: Option, 369 | ) { 370 | for fs in ir.fieldsets.values_mut() { 371 | for f in fs 372 | .fields 373 | .iter_mut() 374 | .filter(|f| bit_size.map_or(true, |s| s == f.bit_size) && f.enumm.is_some()) 375 | { 376 | for (_, desc_string) in enum_desc_pair 377 | .iter() 378 | .filter(|(e_name, _)| **e_name == f.enumm.clone().unwrap()) 379 | { 380 | match &f.description { 381 | Some(desc) => { 382 | f.description = Some(format!("{}\n{}", 
desc.clone(), desc_string.clone())) 383 | } 384 | None => f.description = Some(desc_string.clone()), 385 | } 386 | } 387 | } 388 | } 389 | } 390 | -------------------------------------------------------------------------------- /src/transform/delete.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::BTreeSet; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct Delete { 10 | pub from: RegexSet, 11 | } 12 | 13 | impl Delete { 14 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 15 | let mut ids: BTreeSet = BTreeSet::new(); 16 | for (id, _fs) in ir.fieldsets.iter() { 17 | if self.from.is_match(id) { 18 | info!("deleting fieldset {}", id); 19 | ids.insert(id.clone()); 20 | } 21 | } 22 | 23 | super::delete_fieldsets::remove_fieldset_ids(ir, &ids); 24 | 25 | for id in ids { 26 | ir.fieldsets.remove(&id); 27 | } 28 | 29 | let mut ids: BTreeSet = BTreeSet::new(); 30 | for (id, _e) in ir.enums.iter() { 31 | if self.from.is_match(id) { 32 | info!("deleting enum {}", id); 33 | ids.insert(id.clone()); 34 | } 35 | } 36 | 37 | super::delete_enums::remove_enum_ids(ir, &ids); 38 | 39 | for id in ids { 40 | ir.enums.remove(&id); 41 | } 42 | 43 | let mut ids: BTreeSet = BTreeSet::new(); 44 | for (id, _b) in ir.blocks.iter() { 45 | if self.from.is_match(id) { 46 | info!("deleting block {}", id); 47 | ids.insert(id.clone()); 48 | } 49 | } 50 | 51 | remove_block_ids(ir, &ids); 52 | 53 | for id in ids { 54 | ir.blocks.remove(&id); 55 | } 56 | 57 | Ok(()) 58 | } 59 | } 60 | 61 | pub(crate) fn remove_block_ids(ir: &mut IR, from: &BTreeSet) { 62 | for (_, b) in ir.blocks.iter_mut() { 63 | b.items.retain(|i| { 64 | if let BlockItemInner::Block(bi) = &i.inner { 65 | !from.contains(&bi.block) 66 | } else { 67 | true 68 | } 69 | }); 70 | } 71 | 72 | for (_, d) in ir.devices.iter_mut() { 73 | 
d.peripherals.retain(|p| match &p.block { 74 | Some(block) => !from.contains(block), 75 | None => true, 76 | }); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/transform/delete_enum_variants.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | use super::common::*; 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct DeleteEnumVariants { 9 | #[serde(rename = "enum")] 10 | pub enumm: RegexSet, 11 | pub from: RegexSet, 12 | } 13 | 14 | impl DeleteEnumVariants { 15 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 16 | for id in match_all(ir.enums.keys().cloned(), &self.enumm) { 17 | let e = ir.enums.get_mut(&id).unwrap(); 18 | 19 | e.variants.retain(|variant| { 20 | if self.from.is_match(&variant.name) { 21 | info!("deleting enum variant {}::{}", id, &variant.name); 22 | return false; 23 | } 24 | 25 | true 26 | }); 27 | } 28 | 29 | Ok(()) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/transform/delete_enums.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::BTreeSet; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct DeleteEnums { 10 | pub from: RegexSet, 11 | pub bit_size: Option, 12 | #[serde(default)] 13 | pub soft: bool, 14 | pub keep_desc: Option, 15 | } 16 | 17 | impl DeleteEnums { 18 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 19 | if self.keep_desc.unwrap_or(false) { 20 | let variant_desc = extract_variant_desc(ir, &self.from, self.bit_size)?; 21 | append_variant_desc_to_field(ir, &variant_desc, self.bit_size); 22 | } 23 | 24 | let mut ids: BTreeSet = BTreeSet::new(); 25 | for (id, e) in ir.enums.iter() { 26 | let 
bit_size_matches = self.bit_size.map_or(true, |s| s == e.bit_size); 27 | if self.from.is_match(id) && bit_size_matches { 28 | info!("deleting enum {}", id); 29 | ids.insert(id.clone()); 30 | } 31 | } 32 | 33 | remove_enum_ids(ir, &ids); 34 | 35 | if !self.soft { 36 | for id in ids { 37 | ir.enums.remove(&id); 38 | } 39 | } 40 | 41 | Ok(()) 42 | } 43 | } 44 | 45 | pub(crate) fn remove_enum_ids(ir: &mut IR, from: &BTreeSet) { 46 | for (_, fs) in ir.fieldsets.iter_mut() { 47 | for f in fs.fields.iter_mut() { 48 | if let Some(id) = &mut f.enumm { 49 | if from.contains(id) { 50 | f.enumm = None 51 | } 52 | } 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/transform/delete_enums_used_in.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::BTreeSet; 4 | 5 | use super::common::*; 6 | use super::delete_enums::remove_enum_ids; 7 | use crate::ir::*; 8 | 9 | #[derive(Debug, Serialize, Deserialize)] 10 | pub struct DeleteEnumsUsedIn { 11 | pub fieldsets: RegexSet, 12 | #[serde(default)] 13 | pub soft: bool, 14 | } 15 | 16 | impl DeleteEnumsUsedIn { 17 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 18 | let mut ids: BTreeSet = BTreeSet::new(); 19 | 20 | for (id, fs) in ir.fieldsets.iter() { 21 | if self.fieldsets.is_match(id) { 22 | info!("matched fieldset {}", id); 23 | for f in &fs.fields { 24 | if let Some(id) = &f.enumm { 25 | info!("deleting enum {}", id); 26 | ids.insert(id.clone()); 27 | } 28 | } 29 | } 30 | } 31 | 32 | remove_enum_ids(ir, &ids); 33 | 34 | if !self.soft { 35 | for id in ids { 36 | ir.enums.remove(&id); 37 | } 38 | } 39 | 40 | Ok(()) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/transform/delete_enums_with_variants.rs: -------------------------------------------------------------------------------- 1 | use 
log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::{BTreeMap, BTreeSet}; 4 | 5 | use super::delete_enums::remove_enum_ids; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct DeleteEnumsWithVariants { 10 | variants: BTreeMap, 11 | #[serde(default)] 12 | pub soft: bool, 13 | } 14 | 15 | impl DeleteEnumsWithVariants { 16 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 17 | let mut ids: BTreeSet = BTreeSet::new(); 18 | 19 | 'e: for (id, e) in &ir.enums { 20 | if e.variants.len() != self.variants.len() { 21 | continue; 22 | } 23 | for v in &e.variants { 24 | let Some(name) = self.variants.get(&v.value) else { 25 | continue 'e; 26 | }; 27 | if name != &v.name { 28 | continue 'e; 29 | } 30 | } 31 | info!("deleting enum {}", id); 32 | ids.insert(id.clone()); 33 | } 34 | 35 | remove_enum_ids(ir, &ids); 36 | 37 | if !self.soft { 38 | for id in ids { 39 | ir.enums.remove(&id); 40 | } 41 | } 42 | 43 | Ok(()) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/transform/delete_fields.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct DeleteFields { 8 | pub fieldset: RegexSet, 9 | pub from: RegexSet, 10 | } 11 | 12 | impl DeleteFields { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for id in match_all(ir.fieldsets.keys().cloned(), &self.fieldset) { 15 | let fs = ir.fieldsets.get_mut(&id).unwrap(); 16 | fs.fields.retain(|f| !self.from.is_match(&f.name)); 17 | } 18 | Ok(()) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/transform/delete_fieldsets.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use 
std::collections::BTreeSet; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct DeleteFieldsets { 10 | pub from: RegexSet, 11 | #[serde(default)] 12 | pub useless: bool, 13 | #[serde(default)] 14 | pub soft: bool, 15 | } 16 | 17 | impl DeleteFieldsets { 18 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 19 | let mut ids: BTreeSet = BTreeSet::new(); 20 | for (id, fs) in ir.fieldsets.iter() { 21 | if self.from.is_match(id) && (!self.useless | is_useless(fs)) { 22 | info!("deleting fieldset {}", id); 23 | ids.insert(id.clone()); 24 | } 25 | } 26 | 27 | remove_fieldset_ids(ir, &ids); 28 | 29 | if !self.soft { 30 | for id in ids { 31 | ir.fieldsets.remove(&id); 32 | } 33 | } 34 | 35 | Ok(()) 36 | } 37 | } 38 | 39 | // Fieldset is useless when 40 | // 1. it has no Fields, or 41 | // 2. it has one Fields, which occupied entire Fieldset, and without a enum 42 | fn is_useless(fs: &FieldSet) -> bool { 43 | match &fs.fields[..] 
{ 44 | [] => true, 45 | [f] => fs.bit_size == f.bit_size && f.bit_offset.min_offset() == 0 && f.enumm.is_none(), 46 | _ => false, 47 | } 48 | } 49 | 50 | pub(crate) fn remove_fieldset_ids(ir: &mut IR, from: &BTreeSet) { 51 | for (_, b) in ir.blocks.iter_mut() { 52 | for i in b.items.iter_mut() { 53 | if let BlockItemInner::Register(reg) = &mut i.inner { 54 | if let Some(id) = ®.fieldset { 55 | if from.contains(id) { 56 | reg.fieldset = None 57 | } 58 | } 59 | } 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/transform/delete_peripherals.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | use super::common::*; 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct DeletePeripherals { 9 | pub devices: RegexSet, 10 | pub from: RegexSet, 11 | } 12 | 13 | impl DeletePeripherals { 14 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 15 | for id in match_all(ir.devices.keys().cloned(), &self.devices) { 16 | let d = ir.devices.get_mut(&id).unwrap(); 17 | d.peripherals.retain(|i| { 18 | info!("deleting peripheral {}", &i.name); 19 | !self.from.is_match(&i.name) 20 | }); 21 | } 22 | Ok(()) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/transform/delete_registers.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct DeleteRegisters { 8 | pub block: RegexSet, 9 | pub from: RegexSet, 10 | } 11 | 12 | impl DeleteRegisters { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for id in match_all(ir.blocks.keys().cloned(), &self.block) { 15 | let b = ir.blocks.get_mut(&id).unwrap(); 16 | b.items.retain(|i| 
!self.from.is_match(&i.name)); 17 | } 18 | Ok(()) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/transform/delete_useless_enums.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::BTreeSet; 4 | 5 | use super::delete_enums::remove_enum_ids; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct DeleteUselessEnums { 10 | #[serde(default)] 11 | pub soft: bool, 12 | } 13 | 14 | impl DeleteUselessEnums { 15 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 16 | let mut ids: BTreeSet = BTreeSet::new(); 17 | 18 | for (id, e) in &ir.enums { 19 | if is_useless(e) { 20 | info!("deleting enum {}", id); 21 | ids.insert(id.clone()); 22 | } 23 | } 24 | 25 | remove_enum_ids(ir, &ids); 26 | 27 | if !self.soft { 28 | for id in ids { 29 | ir.enums.remove(&id); 30 | } 31 | } 32 | 33 | Ok(()) 34 | } 35 | } 36 | 37 | const USELESS_ZERO_NAMES: &[&str] = &[ 38 | "dis", 39 | "disable", 40 | "disabled", 41 | "off", 42 | "false", 43 | "no", 44 | "busy", 45 | "pending", 46 | "discon", 47 | "disconnect", 48 | "disconnected", 49 | "not_detected", 50 | "invalid", 51 | "no_effect", 52 | "passthru", 53 | ]; 54 | const USELESS_ONE_NAMES: &[&str] = &[ 55 | "en", 56 | "enable", 57 | "enabled", 58 | "on", 59 | "true", 60 | "yes", 61 | "ready", 62 | "available", 63 | "connect", 64 | "connected", 65 | "detected", 66 | "valid", 67 | "set", 68 | "clr", 69 | ]; 70 | 71 | const NOT_NAMES: &[&str] = &["not", "no", "un", "de", "in"]; 72 | 73 | fn is_useless(e: &Enum) -> bool { 74 | match e.bit_size { 75 | 0 => true, 76 | 1 => match e.variants.len() { 77 | 0 => true, 78 | 1 => true, 79 | 2 => { 80 | let zero = e.variants.iter().find(|v| v.value == 0).unwrap(); 81 | let zero_name = zero.name.to_ascii_lowercase(); 82 | let one = e.variants.iter().find(|v| v.value == 1).unwrap(); 83 | let one_name = 
one.name.to_ascii_lowercase(); 84 | 85 | let obvious = USELESS_ZERO_NAMES.iter().any(|s| s == &zero_name) 86 | && USELESS_ONE_NAMES.iter().any(|s| s == &one_name); 87 | let not = NOT_NAMES.iter().any(|not| { 88 | zero_name == format!("{not}{one_name}") 89 | || zero_name == format!("{not}_{one_name}") 90 | }); 91 | 92 | obvious || not 93 | } 94 | _ => unreachable!(), 95 | }, 96 | _ => false, 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/transform/expand_extends.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::{BTreeMap, BTreeSet}; 4 | 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct ExpandExtends {} 9 | 10 | impl ExpandExtends { 11 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 12 | // Expand blocks 13 | let deps = ir 14 | .blocks 15 | .iter() 16 | .map(|(k, v)| (k.clone(), v.extends.clone())) 17 | .collect(); 18 | for name in topological_sort(deps) { 19 | let block = ir.blocks.get(&name).unwrap(); 20 | if let Some(parent_name) = &block.extends { 21 | let parent = ir.blocks.get(parent_name).unwrap(); 22 | 23 | let items = parent.items.clone(); 24 | let block = ir.blocks.get_mut(&name).unwrap(); 25 | 26 | for i in items { 27 | if !block.items.iter().any(|j| j.name == i.name) { 28 | block.items.push(i); 29 | } 30 | } 31 | } 32 | } 33 | // Expand fiedsets 34 | let deps = ir 35 | .fieldsets 36 | .iter() 37 | .map(|(k, v)| (k.clone(), v.extends.clone())) 38 | .collect(); 39 | for name in topological_sort(deps) { 40 | let fieldset = ir.fieldsets.get(&name).unwrap(); 41 | if let Some(parent_name) = &fieldset.extends { 42 | let parent = ir.fieldsets.get(parent_name).unwrap(); 43 | 44 | let items = parent.fields.clone(); 45 | let fieldset = ir.fieldsets.get_mut(&name).unwrap(); 46 | 47 | for i in items { 48 | if !fieldset.fields.iter().any(|j| j.name == 
i.name) { 49 | fieldset.fields.push(i); 50 | } 51 | } 52 | } 53 | } 54 | 55 | Ok(()) 56 | } 57 | } 58 | 59 | fn topological_sort(vals: BTreeMap>) -> Vec { 60 | for (name, dep) in &vals { 61 | info!("{:?} => {:?}", name, dep); 62 | } 63 | 64 | let mut done = BTreeSet::new(); 65 | let mut res = Vec::new(); 66 | while done.len() != vals.len() { 67 | for (name, dep) in &vals { 68 | if done.contains(name) { 69 | continue; 70 | } 71 | if let Some(dep) = dep { 72 | if !done.contains(dep) { 73 | continue; 74 | } 75 | } 76 | info!("doing {:?} ", name); 77 | done.insert(name.clone()); 78 | res.push(name.clone()); 79 | } 80 | } 81 | res 82 | } 83 | -------------------------------------------------------------------------------- /src/transform/find_duplicate_enums.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::{BTreeMap, BTreeSet}; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct FindDuplicateEnums {} 10 | impl FindDuplicateEnums { 11 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 12 | let mut suggested = BTreeSet::new(); 13 | 14 | for (id1, e1) in ir.enums.iter() { 15 | if suggested.contains(&id1) { 16 | continue; 17 | } 18 | 19 | let mut ids = Vec::new(); 20 | for (id2, e2) in ir.enums.iter() { 21 | if id1 != id2 && mergeable_enums(e1, e2) { 22 | ids.push(id2) 23 | } 24 | } 25 | 26 | if !ids.is_empty() { 27 | ids.push(id1); 28 | info!("Duplicated enums:"); 29 | for id in ids { 30 | suggested.insert(id); 31 | info!(" {}", ir.enums.get(id).path); 32 | } 33 | } 34 | } 35 | 36 | Ok(()) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/transform/find_duplicate_fieldsets.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use 
std::collections::{BTreeMap, BTreeSet}; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct FindDuplicateFieldsets {} 10 | impl FindDuplicateFieldsets { 11 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 12 | let mut suggested = BTreeSet::new(); 13 | 14 | for (id1, fs1) in ir.fieldsets.iter() { 15 | if suggested.contains(&id1) { 16 | continue; 17 | } 18 | 19 | let mut ids = Vec::new(); 20 | for (id2, fs2) in ir.fieldsets.iter() { 21 | if id1 != id2 && check_mergeable_fieldsets(fs1, fs2, CheckLevel::Names).is_ok() { 22 | ids.push(id2) 23 | } 24 | } 25 | 26 | if !ids.is_empty() { 27 | ids.push(id1); 28 | info!("Duplicated fieldsets:"); 29 | for id in ids { 30 | suggested.insert(id); 31 | info!(" {}", ir.fieldsets.get(id).path); 32 | } 33 | } 34 | } 35 | 36 | Ok(()) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/transform/fix_register_bit_sizes.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use crate::ir::*; 4 | 5 | #[derive(Debug, Serialize, Deserialize)] 6 | pub struct FixRegisterBitSizes { 7 | pub create_fieldsets: bool, 8 | } 9 | 10 | impl FixRegisterBitSizes { 11 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 12 | for b in ir.blocks.values_mut() { 13 | for i in &mut b.items { 14 | if let BlockItemInner::Register(r) = &mut i.inner { 15 | let orig_bit_size = r.bit_size; 16 | let good_bit_size = match r.bit_size { 17 | 0..=8 => 8, 18 | 9..=16 => 16, 19 | 17..=32 => 32, 20 | 33..=64 => 64, 21 | 65.. => panic!("Invalid register bit size {}", r.bit_size), 22 | }; 23 | if r.bit_size != good_bit_size { 24 | r.bit_size = good_bit_size; 25 | match &r.fieldset { 26 | None => { 27 | if self.create_fieldsets { 28 | // create a new fieldset, with a single field with the original bit size. 
29 | r.fieldset = Some(i.name.clone()); 30 | let fs = FieldSet { 31 | bit_size: good_bit_size, 32 | fields: vec![Field { 33 | name: "val".to_string(), 34 | bit_offset: BitOffset::Regular(0), 35 | bit_size: orig_bit_size, 36 | description: None, 37 | enumm: None, 38 | array: None, 39 | }], 40 | description: None, 41 | extends: None, 42 | }; 43 | if ir.fieldsets.insert(i.name.clone(), fs).is_some() { 44 | panic!("dup fieldset {}", i.name); 45 | } 46 | } 47 | } 48 | Some(fs) => { 49 | // expand the size of the existing fieldset. 50 | let fs = ir.fieldsets.get_mut(fs).unwrap(); 51 | fs.bit_size = good_bit_size; 52 | } 53 | } 54 | } 55 | } 56 | } 57 | } 58 | 59 | Ok(()) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/transform/make_block.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | use super::common::*; 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct MakeBlock { 9 | pub blocks: RegexSet, 10 | pub from: RegexSet, 11 | pub to_outer: String, 12 | pub to_block: String, 13 | pub to_inner: String, 14 | } 15 | 16 | impl MakeBlock { 17 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 18 | for id in match_all(ir.blocks.keys().cloned(), &self.blocks) { 19 | let b = ir.blocks.get_mut(&id).unwrap(); 20 | let groups = match_groups( 21 | b.items.iter().map(|f| f.name.clone()), 22 | &self.from, 23 | &self.to_outer, 24 | ); 25 | for (to, group) in groups { 26 | let b = ir.blocks.get_mut(&id).unwrap(); 27 | info!("blockifizing to {}", to); 28 | 29 | // Grab all items into a vec 30 | let mut items = Vec::new(); 31 | for i in b.items.iter().filter(|i| group.contains(&i.name)) { 32 | items.push(i); 33 | } 34 | 35 | // Sort by offs 36 | items.sort_by_key(|i| i.byte_offset); 37 | for i in &items { 38 | info!(" {}", i.name); 39 | } 40 | 41 | // todo check they're mergeable 42 | // todo 
check they're not arrays (arrays of arrays not supported) 43 | 44 | let byte_offset = items[0].byte_offset; 45 | 46 | let b2 = Block { 47 | extends: None, 48 | description: None, 49 | items: items 50 | .iter() 51 | .map(|&i| { 52 | let mut i = i.clone(); 53 | i.name = match_expand(&i.name, &self.from, &self.to_inner).unwrap(); 54 | i.byte_offset -= byte_offset; 55 | i 56 | }) 57 | .collect(), 58 | }; 59 | 60 | // TODO if destination block exists, check mergeable 61 | let dest = self.to_block.clone(); // todo regex 62 | ir.blocks.insert(dest.clone(), b2); 63 | 64 | // Remove all items 65 | let b = ir.blocks.get_mut(&id).unwrap(); 66 | b.items.retain(|i| !group.contains(&i.name)); 67 | 68 | // Create the new block item 69 | b.items.push(BlockItem { 70 | name: to, 71 | description: None, 72 | array: None, 73 | byte_offset, 74 | inner: BlockItemInner::Block(BlockItemBlock { block: dest }), 75 | }); 76 | } 77 | } 78 | Ok(()) 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/transform/make_field_array.rs: -------------------------------------------------------------------------------- 1 | use anyhow::bail; 2 | use log::*; 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct MakeFieldArray { 10 | pub fieldsets: RegexSet, 11 | pub from: RegexSet, 12 | pub to: String, 13 | #[serde(default)] 14 | pub mode: ArrayMode, 15 | } 16 | 17 | impl MakeFieldArray { 18 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 19 | for id in match_all(ir.fieldsets.keys().cloned(), &self.fieldsets) { 20 | let b = ir.fieldsets.get_mut(&id).unwrap(); 21 | let groups = match_groups( 22 | b.fields.iter().map(|f| f.name.clone()), 23 | &self.from, 24 | &self.to, 25 | ); 26 | for (to, group) in groups { 27 | info!("arrayizing to {}", to); 28 | 29 | // Grab all items into a vec 30 | let mut items = Vec::new(); 31 | for i in 
b.fields.iter().filter(|i| group.contains(&i.name)) { 32 | items.push(i); 33 | } 34 | 35 | // todo check they're mergeable 36 | 37 | // one array shouldn't contain both regular and cursed bit_offset type 38 | { 39 | let has_regular_bit_offset = items 40 | .iter() 41 | .any(|i| matches!(i.bit_offset, BitOffset::Regular(_))); 42 | 43 | let has_cursed_bit_offset = items 44 | .iter() 45 | .any(|i| matches!(i.bit_offset, BitOffset::Cursed(_))); 46 | 47 | if has_regular_bit_offset && has_cursed_bit_offset { 48 | bail!("arrayize: items {} cannot mix bit_offset type", to) 49 | } 50 | } 51 | 52 | // todo check they're not arrays (arrays of arrays not supported) 53 | 54 | // Sort by offs 55 | items.sort_by_key(|i| &i.bit_offset); 56 | for i in &items { 57 | info!(" {}", i.name); 58 | } 59 | 60 | let (offset, array) = calc_array( 61 | items.iter().map(|x| x.bit_offset.min_offset()).collect(), 62 | self.mode, 63 | )?; 64 | 65 | let mut item = items[0].clone(); 66 | 67 | // Remove all 68 | b.fields.retain(|i| !group.contains(&i.name)); 69 | 70 | // Create the new array item 71 | item.name = to; 72 | item.array = Some(array); 73 | item.bit_offset = BitOffset::Regular(offset); 74 | b.fields.push(item); 75 | } 76 | } 77 | Ok(()) 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/transform/make_register_array.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | use super::common::*; 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct MakeRegisterArray { 9 | pub blocks: RegexSet, 10 | pub from: RegexSet, 11 | pub to: String, 12 | #[serde(default)] 13 | pub mode: ArrayMode, 14 | } 15 | 16 | impl MakeRegisterArray { 17 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 18 | for id in match_all(ir.blocks.keys().cloned(), &self.blocks) { 19 | let b = ir.blocks.get_mut(&id).unwrap(); 20 | let groups 
= match_groups(b.items.iter().map(|f| f.name.clone()), &self.from, &self.to); 21 | for (to, group) in groups { 22 | info!("arrayizing to {}", to); 23 | 24 | // Grab all items into a vec 25 | let mut items = Vec::new(); 26 | for i in b.items.iter().filter(|i| group.contains(&i.name)) { 27 | items.push(i); 28 | } 29 | 30 | // todo check they're mergeable 31 | // todo check they're not arrays (arrays of arrays not supported) 32 | 33 | // Sort by offs 34 | items.sort_by_key(|i| i.byte_offset); 35 | for i in &items { 36 | info!(" {}", i.name); 37 | } 38 | 39 | let (offset, array) = 40 | calc_array(items.iter().map(|x| x.byte_offset).collect(), self.mode)?; 41 | 42 | let mut item = items[0].clone(); 43 | 44 | // Remove all 45 | b.items.retain(|i| !group.contains(&i.name)); 46 | 47 | // Create the new array item 48 | item.name = to; 49 | item.array = Some(array); 50 | item.byte_offset = offset; 51 | b.items.push(item); 52 | } 53 | } 54 | Ok(()) 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/transform/merge_blocks.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::BTreeSet; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct MergeBlocks { 10 | pub from: RegexSet, 11 | pub to: String, 12 | pub main: Option, 13 | #[serde(default)] 14 | pub check: CheckLevel, 15 | } 16 | 17 | impl MergeBlocks { 18 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 19 | let groups = match_groups(ir.blocks.keys().cloned(), &self.from, &self.to); 20 | 21 | for (to, group) in groups { 22 | info!("Merging blocks, dest: {}", to); 23 | for id in &group { 24 | info!(" {}", id); 25 | } 26 | self.merge_blocks(ir, group, to, self.main.as_ref())?; 27 | } 28 | 29 | Ok(()) 30 | } 31 | 32 | fn merge_blocks( 33 | &self, 34 | ir: &mut IR, 35 | ids: BTreeSet, 36 | to: 
String, 37 | main: Option<&RegexSet>, 38 | ) -> anyhow::Result<()> { 39 | let mut main_id = ids.iter().next().unwrap().clone(); 40 | if let Some(main) = main { 41 | for id in ids.iter() { 42 | if main.is_match(id) { 43 | main_id = id.clone(); 44 | break; 45 | } 46 | } 47 | } 48 | let b = ir.blocks.get(&main_id).unwrap().clone(); 49 | 50 | // todo 51 | //for id in &ids { 52 | // let b2 = ir.blocks.get(id).unwrap(); 53 | // check_mergeable_blocks(&b, b2, self.check)?; 54 | //} 55 | 56 | replace_block_ids(ir, &ids, to.clone()); 57 | for id in &ids { 58 | ir.blocks.remove(id); 59 | } 60 | ir.blocks.insert(to, b); 61 | 62 | Ok(()) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/transform/merge_enums.rs: -------------------------------------------------------------------------------- 1 | use anyhow::bail; 2 | use log::*; 3 | use serde::{Deserialize, Serialize}; 4 | use std::collections::BTreeSet; 5 | 6 | use super::common::*; 7 | use crate::ir::*; 8 | 9 | #[derive(Debug, Serialize, Deserialize)] 10 | pub struct MergeEnums { 11 | pub from: RegexSet, 12 | pub to: String, 13 | pub main: Option, 14 | #[serde(default)] 15 | pub check: CheckLevel, 16 | #[serde(default)] 17 | pub skip_unmergeable: bool, 18 | pub keep_desc: Option, 19 | } 20 | 21 | impl MergeEnums { 22 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 23 | if self.keep_desc.unwrap_or(false) { 24 | let variant_desc = extract_variant_desc(ir, &self.from, None)?; 25 | append_variant_desc_to_field(ir, &variant_desc, None); 26 | } 27 | 28 | let groups = match_groups(ir.enums.keys().cloned(), &self.from, &self.to); 29 | 30 | for (to, group) in groups { 31 | info!("Merging enums, dest: {}", to); 32 | for id in &group { 33 | info!(" {}", id); 34 | } 35 | self.merge_enums(ir, group, to, self.main.as_ref())?; 36 | } 37 | 38 | Ok(()) 39 | } 40 | 41 | fn merge_enums( 42 | &self, 43 | ir: &mut IR, 44 | ids: BTreeSet, 45 | to: String, 46 | main: 
Option<&RegexSet>, 47 | ) -> anyhow::Result<()> { 48 | let mut main_id = ids.iter().next().unwrap().clone(); 49 | if let Some(main) = main { 50 | for id in ids.iter() { 51 | if main.is_match(id) { 52 | main_id = id.clone(); 53 | break; 54 | } 55 | } 56 | } 57 | let e = ir.enums.get(&main_id).unwrap().clone(); 58 | 59 | for id in &ids { 60 | let e2 = ir.enums.get(id).unwrap(); 61 | if let Err(e) = check_mergeable_enums(&main_id, &e, id, e2, self.check) { 62 | if self.skip_unmergeable { 63 | info!("skipping: {:?}", to); 64 | return Ok(()); 65 | } else { 66 | return Err(e); 67 | } 68 | } 69 | } 70 | for id in &ids { 71 | ir.enums.remove(id); 72 | } 73 | 74 | assert!(ir.enums.insert(to.clone(), e).is_none()); 75 | replace_enum_ids(ir, &ids, to); 76 | 77 | Ok(()) 78 | } 79 | } 80 | 81 | fn check_mergeable_enums( 82 | a_id: &str, 83 | a: &Enum, 84 | b_id: &str, 85 | b: &Enum, 86 | level: CheckLevel, 87 | ) -> anyhow::Result<()> { 88 | if let Err(e) = check_mergeable_enums_inner(a, b, level) { 89 | bail!("Cannot merge enums.\nfirst: {a_id}\n{a:#?}\nsecond: {b_id}\n{b:#?}\ncause: {e:?}",) 90 | } 91 | Ok(()) 92 | } 93 | 94 | fn check_mergeable_enums_inner(a: &Enum, b: &Enum, level: CheckLevel) -> anyhow::Result<()> { 95 | if a.bit_size != b.bit_size { 96 | bail!("Different bit size: {} vs {}", a.bit_size, b.bit_size) 97 | } 98 | 99 | if level >= CheckLevel::Layout { 100 | if a.variants.len() != b.variants.len() { 101 | bail!("Different variant count") 102 | } 103 | 104 | let mut aok = [false; 1024]; 105 | let mut bok = [false; 1024]; 106 | 107 | for (ia, fa) in a.variants.iter().enumerate() { 108 | if let Some((ib, _fb)) = b 109 | .variants 110 | .iter() 111 | .enumerate() 112 | .find(|(ib, fb)| !bok[*ib] && mergeable_variants(fa, fb, level)) 113 | { 114 | aok[ia] = true; 115 | bok[ib] = true; 116 | } else { 117 | bail!("Variant in first enum has no match: {:?}", fa); 118 | } 119 | } 120 | } 121 | 122 | Ok(()) 123 | } 124 | 125 | fn mergeable_variants(a: &EnumVariant, b: 
&EnumVariant, level: CheckLevel) -> bool { 126 | let mut res = true; 127 | if level >= CheckLevel::Layout { 128 | res &= a.value == b.value; 129 | } 130 | if level >= CheckLevel::Names { 131 | res &= a.name == b.name; 132 | } 133 | if level >= CheckLevel::Descriptions { 134 | res &= a.description == b.description; 135 | } 136 | res 137 | } 138 | -------------------------------------------------------------------------------- /src/transform/merge_fieldsets.rs: -------------------------------------------------------------------------------- 1 | use log::*; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::BTreeSet; 4 | 5 | use super::common::*; 6 | use crate::ir::*; 7 | 8 | #[derive(Debug, Serialize, Deserialize)] 9 | pub struct MergeFieldsets { 10 | pub from: RegexSet, 11 | pub to: String, 12 | pub main: Option, 13 | #[serde(default)] 14 | pub check: CheckLevel, 15 | } 16 | 17 | impl MergeFieldsets { 18 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 19 | let groups = match_groups(ir.fieldsets.keys().cloned(), &self.from, &self.to); 20 | 21 | for (to, group) in groups { 22 | info!("Merging fieldsets, dest: {}", to); 23 | for id in &group { 24 | info!(" {}", id); 25 | } 26 | self.merge_fieldsets(ir, group, to, self.main.as_ref())?; 27 | } 28 | 29 | Ok(()) 30 | } 31 | 32 | fn merge_fieldsets( 33 | &self, 34 | ir: &mut IR, 35 | ids: BTreeSet, 36 | to: String, 37 | main: Option<&RegexSet>, 38 | ) -> anyhow::Result<()> { 39 | let mut main_id = ids.iter().next().unwrap().clone(); 40 | if let Some(main) = main { 41 | for id in ids.iter() { 42 | if main.is_match(id) { 43 | main_id = id.clone(); 44 | break; 45 | } 46 | } 47 | } 48 | let fs = ir.fieldsets.get(&main_id).unwrap().clone(); 49 | 50 | for id in &ids { 51 | let fs2 = ir.fieldsets.get(id).unwrap(); 52 | check_mergeable_fieldsets(&main_id, &fs, id, fs2, self.check)?; 53 | } 54 | 55 | for id in &ids { 56 | ir.fieldsets.remove(id); 57 | } 58 | 59 | assert!(ir.fieldsets.insert(to.clone(), 
fs).is_none()); 60 | replace_fieldset_ids(ir, &ids, to); 61 | 62 | Ok(()) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/transform/mod.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::collections::{BTreeMap, HashSet}; 3 | use std::mem::take; 4 | 5 | use crate::ir::*; 6 | 7 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 8 | pub enum NameKind { 9 | Device, 10 | DevicePeripheral, 11 | DeviceInterrupt, 12 | Block, 13 | BlockItem, 14 | Fieldset, 15 | Field, 16 | Enum, 17 | EnumVariant, 18 | } 19 | 20 | #[derive(PartialEq, Eq, Hash)] 21 | struct NameCollisionError { 22 | kind: NameKind, 23 | old: String, 24 | new: String, 25 | } 26 | 27 | impl NameCollisionError { 28 | fn new(kind: NameKind, old: String, new: String) -> Self { 29 | Self { kind, old, new } 30 | } 31 | } 32 | 33 | impl std::fmt::Debug for NameCollisionError { 34 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 35 | write!( 36 | f, 37 | "Err: on rename {:?} \"{}\", new name \"{}\" already exist", 38 | self.kind, self.old, self.new 39 | ) 40 | } 41 | } 42 | 43 | struct NameCollisionErrors(HashSet); 44 | 45 | impl std::fmt::Debug for NameCollisionErrors { 46 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 47 | if !self.0.is_empty() { 48 | writeln!(f)? 49 | } 50 | 51 | for err in self.0.iter() { 52 | writeln!(f, "{:?}", err)? 
53 | } 54 | Ok(()) 55 | } 56 | } 57 | 58 | fn rename_opt(s: &mut Option, f: impl Fn(&mut String)) { 59 | if let Some(s) = s { 60 | f(s) 61 | } 62 | } 63 | 64 | pub fn map_block_names(ir: &mut IR, f: impl Fn(&mut String)) { 65 | remap_names(NameKind::Block, &mut ir.blocks, &f).unwrap(); 66 | 67 | for (_, d) in ir.devices.iter_mut() { 68 | for p in &mut d.peripherals { 69 | rename_opt(&mut p.block, &f); 70 | } 71 | } 72 | 73 | for (_, b) in ir.blocks.iter_mut() { 74 | for i in b.items.iter_mut() { 75 | match &mut i.inner { 76 | BlockItemInner::Block(p) => f(&mut p.block), 77 | BlockItemInner::Register(_r) => {} 78 | } 79 | } 80 | } 81 | } 82 | 83 | pub fn map_fieldset_names(ir: &mut IR, f: impl Fn(&mut String)) { 84 | remap_names(NameKind::Fieldset, &mut ir.fieldsets, &f).unwrap(); 85 | 86 | for (_, b) in ir.blocks.iter_mut() { 87 | for i in b.items.iter_mut() { 88 | match &mut i.inner { 89 | BlockItemInner::Block(_p) => {} 90 | BlockItemInner::Register(r) => rename_opt(&mut r.fieldset, &f), 91 | } 92 | } 93 | } 94 | } 95 | 96 | pub fn map_enum_names(ir: &mut IR, f: impl Fn(&mut String)) { 97 | remap_names(NameKind::Enum, &mut ir.enums, &f).unwrap(); 98 | 99 | for (_, fs) in ir.fieldsets.iter_mut() { 100 | for ff in fs.fields.iter_mut() { 101 | rename_opt(&mut ff.enumm, &f); 102 | } 103 | } 104 | } 105 | 106 | pub fn map_device_names(ir: &mut IR, f: impl Fn(&mut String)) { 107 | remap_names(NameKind::Device, &mut ir.devices, &f).unwrap(); 108 | } 109 | 110 | pub fn map_device_interrupt_names(ir: &mut IR, f: impl Fn(&mut String)) { 111 | for (_, d) in ir.devices.iter_mut() { 112 | for i in &mut d.interrupts { 113 | f(&mut i.name); 114 | } 115 | } 116 | } 117 | 118 | pub fn map_device_peripheral_names(ir: &mut IR, f: impl Fn(&mut String)) { 119 | for (_, d) in ir.devices.iter_mut() { 120 | for p in &mut d.peripherals { 121 | f(&mut p.name); 122 | } 123 | } 124 | } 125 | 126 | pub fn map_block_item_names(ir: &mut IR, f: impl Fn(&mut String)) { 127 | for (_, b) in 
ir.blocks.iter_mut() { 128 | for i in b.items.iter_mut() { 129 | f(&mut i.name) 130 | } 131 | } 132 | } 133 | 134 | pub fn map_field_names(ir: &mut IR, f: impl Fn(&mut String)) { 135 | for (_, fs) in ir.fieldsets.iter_mut() { 136 | for ff in fs.fields.iter_mut() { 137 | f(&mut ff.name) 138 | } 139 | } 140 | } 141 | 142 | pub fn map_enum_variant_names(ir: &mut IR, f: impl Fn(&mut String)) { 143 | for (_, e) in ir.enums.iter_mut() { 144 | for v in e.variants.iter_mut() { 145 | f(&mut v.name) 146 | } 147 | } 148 | } 149 | 150 | pub fn map_names(ir: &mut IR, f: impl Fn(NameKind, &mut String)) { 151 | map_device_names(ir, |s| f(NameKind::Device, s)); 152 | map_device_peripheral_names(ir, |s| f(NameKind::DevicePeripheral, s)); 153 | map_device_interrupt_names(ir, |s| f(NameKind::DeviceInterrupt, s)); 154 | map_block_names(ir, |s| f(NameKind::Block, s)); 155 | map_block_item_names(ir, |s| f(NameKind::BlockItem, s)); 156 | map_fieldset_names(ir, |s| f(NameKind::Fieldset, s)); 157 | map_field_names(ir, |s| f(NameKind::Field, s)); 158 | map_enum_names(ir, |s| f(NameKind::Enum, s)); 159 | map_enum_variant_names(ir, |s| f(NameKind::EnumVariant, s)); 160 | } 161 | 162 | pub fn map_descriptions(ir: &mut IR, mut ff: impl FnMut(&str) -> String) -> anyhow::Result<()> { 163 | let mut mapit = |d: &mut Option| { 164 | *d = d.as_ref().map(|p| ff(p)); 165 | }; 166 | 167 | for (_, b) in ir.blocks.iter_mut() { 168 | mapit(&mut b.description); 169 | for i in b.items.iter_mut() { 170 | mapit(&mut i.description); 171 | } 172 | } 173 | 174 | for (_, fs) in ir.fieldsets.iter_mut() { 175 | mapit(&mut fs.description); 176 | for f in fs.fields.iter_mut() { 177 | mapit(&mut f.description); 178 | } 179 | } 180 | 181 | for (_, e) in ir.enums.iter_mut() { 182 | mapit(&mut e.description); 183 | for v in e.variants.iter_mut() { 184 | mapit(&mut v.description); 185 | } 186 | } 187 | 188 | Ok(()) 189 | } 190 | 191 | fn remap_names( 192 | kind: NameKind, 193 | x: &mut BTreeMap, 194 | f: impl Fn(&mut 
String), 195 | ) -> Result<(), NameCollisionErrors> { 196 | let mut res = BTreeMap::new(); 197 | let mut errs = HashSet::new(); 198 | 199 | for (mut name, val) in take(x) { 200 | let orginal_name = name.clone(); 201 | f(&mut name); 202 | if res.insert(name.clone(), val).is_some() { 203 | errs.insert(NameCollisionError::new(kind, orginal_name, name)); 204 | } 205 | } 206 | 207 | if !errs.is_empty() { 208 | return Err(NameCollisionErrors(errs)); 209 | } 210 | 211 | *x = res; 212 | Ok(()) 213 | } 214 | 215 | mod common; 216 | 217 | macro_rules! transforms { 218 | ($($mod:ident::$struct:ident,)*) => { 219 | $( pub mod $mod; )* 220 | 221 | #[derive(Debug, Serialize, Deserialize)] 222 | pub enum Transform { 223 | $( $struct($mod::$struct), )* 224 | } 225 | 226 | impl Transform { 227 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 228 | match self { 229 | $( Self::$struct(t) => t.run(ir), )* 230 | } 231 | } 232 | } 233 | }; 234 | } 235 | 236 | transforms!( 237 | sanitize::Sanitize, 238 | sort::Sort, 239 | add::Add, 240 | add_enum_variants::AddEnumVariants, 241 | add_fields::AddFields, 242 | add_registers::AddRegisters, 243 | add_interrupts::AddInterrupts, 244 | delete::Delete, 245 | delete_enum_variants::DeleteEnumVariants, 246 | delete_enums::DeleteEnums, 247 | delete_enums_with_variants::DeleteEnumsWithVariants, 248 | delete_enums_used_in::DeleteEnumsUsedIn, 249 | delete_useless_enums::DeleteUselessEnums, 250 | delete_fields::DeleteFields, 251 | delete_fieldsets::DeleteFieldsets, 252 | delete_peripherals::DeletePeripherals, 253 | delete_registers::DeleteRegisters, 254 | expand_extends::ExpandExtends, 255 | merge_blocks::MergeBlocks, 256 | merge_enums::MergeEnums, 257 | merge_fieldsets::MergeFieldsets, 258 | rename::Rename, 259 | rename_fields::RenameFields, 260 | rename_registers::RenameRegisters, 261 | rename_enum_variants::RenameEnumVariants, 262 | resize_enums::ResizeEnums, 263 | make_register_array::MakeRegisterArray, 264 | make_field_array::MakeFieldArray, 
265 | make_block::MakeBlock, 266 | modify_byte_offset::ModifyByteOffset, 267 | modify_fields_enum::ModifyFieldsEnum, 268 | fix_register_bit_sizes::FixRegisterBitSizes, 269 | rename_interrupts::RenameInterrupts, 270 | rename_peripherals::RenamePeripherals, 271 | ); 272 | -------------------------------------------------------------------------------- /src/transform/modify_byte_offset.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct ModifyByteOffset { 8 | pub blocks: RegexSet, 9 | pub exclude_items: Option, 10 | pub add_offset: i32, 11 | pub strict: Option, // if this value is false, bypass overflowed/underflowed modification 12 | } 13 | 14 | impl ModifyByteOffset { 15 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 16 | let strict = self.strict.unwrap_or_default(); 17 | 18 | let mut err_names = Vec::new(); 19 | 20 | for id in match_all(ir.blocks.keys().cloned(), &self.blocks) { 21 | let b = ir.blocks.get_mut(&id).unwrap(); 22 | for i in &mut b.items { 23 | if let Some(exclude) = &self.exclude_items { 24 | if exclude.is_match(&i.name) { 25 | continue; 26 | } 27 | } 28 | 29 | match i.byte_offset.checked_add_signed(self.add_offset) { 30 | Some(new_offset) => i.byte_offset = new_offset, 31 | None if strict => err_names.push((id.clone(), i.name.clone())), 32 | None => (), 33 | }; 34 | } 35 | } 36 | 37 | if !err_names.is_empty() { 38 | let mut err_msg = String::new(); 39 | 40 | for e_name in err_names { 41 | err_msg.push_str(&format!( 42 | "Block: {} Item: {}: byte_offset out of range after modify\n", 43 | e_name.0, e_name.1 44 | )); 45 | } 46 | 47 | panic!("{err_msg}") 48 | } 49 | 50 | Ok(()) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/transform/modify_fields_enum.rs: 
-------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct ModifyFieldsEnum { 8 | pub fieldset: RegexSet, 9 | pub field: RegexSet, 10 | #[serde(rename = "enum")] 11 | pub enumm: RegexSet, 12 | } 13 | 14 | impl ModifyFieldsEnum { 15 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 16 | let matched_enums = match_all(ir.enums.keys().cloned(), &self.enumm); 17 | if matched_enums.len() != 1 { 18 | anyhow::bail!( 19 | "Expected exactly one enum to match, found {}", 20 | matched_enums.len() 21 | ); 22 | } 23 | let enum_id = matched_enums.first().unwrap().clone(); 24 | 25 | for id in match_all(ir.fieldsets.keys().cloned(), &self.fieldset) { 26 | let fs = ir.fieldsets.get_mut(&id).unwrap(); 27 | fs.fields 28 | .iter_mut() 29 | .filter(|f| self.field.is_match(&f.name)) 30 | .for_each(|f| f.enumm = Some(enum_id.clone())); 31 | } 32 | 33 | Ok(()) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/transform/rename.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub enum RenameType { 8 | All, 9 | Device, 10 | Block, 11 | Fieldset, 12 | Enum, 13 | } 14 | 15 | impl Default for RenameType { 16 | fn default() -> Self { 17 | RenameType::All 18 | } 19 | } 20 | 21 | #[derive(Debug, Serialize, Deserialize)] 22 | pub struct Rename { 23 | pub from: RegexSet, 24 | pub to: String, 25 | pub r#type: RenameType, 26 | } 27 | 28 | impl Rename { 29 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 30 | let renamer = |name: &mut String| { 31 | if let Some(res) = match_expand(name, &self.from, &self.to) { 32 | *name = res 33 | } 34 | }; 35 | 36 | match self.r#type { 37 | 
RenameType::All => { 38 | super::map_device_names(ir, renamer); 39 | super::map_block_names(ir, renamer); 40 | super::map_fieldset_names(ir, renamer); 41 | super::map_enum_names(ir, renamer); 42 | } 43 | RenameType::Device => super::map_device_names(ir, renamer), 44 | RenameType::Block => super::map_block_names(ir, renamer), 45 | RenameType::Fieldset => super::map_fieldset_names(ir, renamer), 46 | RenameType::Enum => super::map_enum_names(ir, renamer), 47 | } 48 | 49 | Ok(()) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/transform/rename_enum_variants.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct RenameEnumVariants { 8 | #[serde(rename = "enum")] 9 | pub enumm: RegexSet, 10 | pub from: RegexSet, 11 | pub to: String, 12 | } 13 | 14 | impl RenameEnumVariants { 15 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 16 | for id in match_all(ir.enums.keys().cloned(), &self.enumm) { 17 | let e = ir.enums.get_mut(&id).unwrap(); 18 | for i in &mut e.variants { 19 | if let Some(name) = match_expand(&i.name, &self.from, &self.to) { 20 | i.name = name; 21 | } 22 | } 23 | } 24 | Ok(()) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/transform/rename_fields.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct RenameFields { 8 | pub fieldset: RegexSet, 9 | pub from: RegexSet, 10 | pub to: String, 11 | } 12 | 13 | impl RenameFields { 14 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 15 | for id in match_all(ir.fieldsets.keys().cloned(), &self.fieldset) { 16 | let fs = 
ir.fieldsets.get_mut(&id).unwrap(); 17 | for f in &mut fs.fields { 18 | if let Some(name) = match_expand(&f.name, &self.from, &self.to) { 19 | f.name = name; 20 | } 21 | } 22 | } 23 | Ok(()) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/transform/rename_interrupts.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct RenameInterrupts { 8 | pub from: RegexSet, 9 | pub to: String, 10 | } 11 | 12 | impl RenameInterrupts { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for d in ir.devices.values_mut() { 15 | for i in &mut d.interrupts { 16 | if let Some(name) = match_expand(&i.name, &self.from, &self.to) { 17 | i.name = name; 18 | } 19 | } 20 | } 21 | Ok(()) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/transform/rename_peripherals.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct RenamePeripherals { 8 | pub from: RegexSet, 9 | pub to: String, 10 | } 11 | 12 | impl RenamePeripherals { 13 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 14 | for d in ir.devices.values_mut() { 15 | for p in &mut d.peripherals { 16 | if let Some(name) = match_expand(&p.name, &self.from, &self.to) { 17 | p.name = name; 18 | } 19 | } 20 | } 21 | Ok(()) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/transform/rename_registers.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::common::*; 4 | use crate::ir::*; 5 | 6 | 
#[derive(Debug, Serialize, Deserialize)] 7 | pub struct RenameRegisters { 8 | pub block: RegexSet, 9 | pub from: RegexSet, 10 | pub to: String, 11 | } 12 | 13 | impl RenameRegisters { 14 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 15 | for id in match_all(ir.blocks.keys().cloned(), &self.block) { 16 | let b = ir.blocks.get_mut(&id).unwrap(); 17 | for i in &mut b.items { 18 | if let Some(name) = match_expand(&i.name, &self.from, &self.to) { 19 | i.name = name; 20 | } 21 | } 22 | } 23 | Ok(()) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/transform/resize_enums.rs: -------------------------------------------------------------------------------- 1 | 2 | 3 | use anyhow::Context; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | use crate::ir::*; 7 | 8 | use super::common::{match_all, RegexSet}; 9 | 10 | #[derive(Debug, Serialize, Deserialize)] 11 | pub struct ResizeEnums { 12 | #[serde(rename = "enum")] 13 | emumm: RegexSet, 14 | bit_size: u32, 15 | } 16 | 17 | impl ResizeEnums { 18 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 19 | let ids = match_all(ir.enums.keys().cloned(), &self.emumm); 20 | 21 | if self.bit_size == 0 { 22 | panic!("Cannot resize an enum to 0 bits (delete the enum?)"); 23 | } 24 | 25 | // Resize the enums 26 | for enumm in ids.iter() { 27 | log::info!("Resizing enum {} to {} bits", enumm, self.bit_size); 28 | 29 | let enumm = ir.enums.get_mut(enumm).unwrap(); 30 | enumm.bit_size = self.bit_size; 31 | } 32 | 33 | for enumm in ids.iter() { 34 | verify_variants(ir, enumm)?; 35 | update_uses(ir, enumm)?; 36 | } 37 | 38 | Ok(()) 39 | } 40 | } 41 | 42 | /// Verify all enum variants fit within the bit size of the enum after resize. 43 | fn verify_variants(ir: &IR, enumm: &str) -> anyhow::Result<()> { 44 | let e = ir.enums.get(enumm).unwrap(); 45 | let max_value = 2_u64.checked_pow(e.bit_size) 46 | .with_context(|| format!("Bit size is too large"))? 
47 | .checked_sub(1) 48 | .with_context(|| format!("New bit size is invalid: {}", e.bit_size))?; 49 | let mut error = false; 50 | 51 | for variant in e.variants.iter() { 52 | if variant.value > max_value { 53 | log::error!( 54 | "{}::{} (value: {}) is out of range as a result of resize to {} bits", 55 | enumm, variant.name, variant.value, e.bit_size 56 | ); 57 | error |= true; 58 | } 59 | } 60 | 61 | if error { 62 | panic!(); 63 | } 64 | 65 | Ok(()) 66 | } 67 | 68 | fn update_uses(ir: &mut IR, enumm: &str) -> anyhow::Result<()> { 69 | let fieldsets = ir 70 | .fieldsets 71 | .iter() 72 | .filter(|(_, fs)| fs.fields.iter().any(|f| f.enumm.as_deref() == Some(enumm))) 73 | .map(|(name, _)| name) 74 | .cloned() 75 | .collect::>(); 76 | 77 | let bit_size = ir.enums.get(enumm).unwrap().bit_size; 78 | 79 | for fs_name in fieldsets { 80 | let fs = ir.fieldsets.get_mut(&fs_name).unwrap(); 81 | 82 | for field in fs 83 | .fields 84 | .iter_mut() 85 | .filter(|f| f.enumm.as_deref() == Some(enumm)) 86 | { 87 | field.bit_size = bit_size; 88 | } 89 | 90 | let mut error = false; 91 | 92 | // Verify there are no overlapping fields after resizing enums. 
93 | for (i1, i2) in Pairs::new(fs.fields.iter()) { 94 | // expand every BitOffset to a Vec, 95 | // and compare at that level 96 | 'COMPARE: for i1_range in i1.bit_offset.clone().into_ranges(i1.bit_size) { 97 | for i2_range in i2.bit_offset.clone().into_ranges(i2.bit_size) { 98 | if i2_range.end() > i1_range.start() && i1_range.end() > i2_range.start() { 99 | log::error!( 100 | "fieldset {}: fields overlap: {} {}", 101 | fs_name, i1.name, i2.name 102 | ); 103 | error |= true; 104 | break 'COMPARE; 105 | } 106 | } 107 | } 108 | } 109 | 110 | if error { 111 | panic!(); 112 | } 113 | } 114 | 115 | Ok(()) 116 | } 117 | 118 | struct Pairs { 119 | head: Option, 120 | tail: U, 121 | next: U, 122 | } 123 | 124 | impl Pairs { 125 | fn new(mut iter: U) -> Self { 126 | let head = iter.next(); 127 | Pairs { 128 | head, 129 | tail: iter.clone(), 130 | next: iter, 131 | } 132 | } 133 | } 134 | 135 | impl Iterator for Pairs 136 | where 137 | U::Item: Clone, 138 | { 139 | type Item = (U::Item, U::Item); 140 | 141 | fn next(&mut self) -> Option { 142 | let a = self.head.as_ref()?.clone(); 143 | 144 | if let Some(b) = self.tail.next() { 145 | return Some((a, b)); 146 | } 147 | 148 | match self.next.next() { 149 | Some(new_head) => { 150 | self.head = Some(new_head); 151 | self.tail = self.next.clone(); 152 | self.next() 153 | } 154 | None => None, 155 | } 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /src/transform/sanitize.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use crate::util::StringExt; 4 | 5 | use super::{map_names, NameKind, IR}; 6 | 7 | #[derive(Debug, Serialize, Deserialize)] 8 | pub struct Sanitize {} 9 | 10 | impl Sanitize { 11 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 12 | map_names(ir, |k, p| match k { 13 | NameKind::Device => *p = sanitize_path(p), 14 | NameKind::DevicePeripheral => *p = 
p.to_sanitized_constant_case().to_string(), 15 | NameKind::DeviceInterrupt => *p = p.to_sanitized_constant_case().to_string(), 16 | NameKind::Block => *p = sanitize_path(p), 17 | NameKind::Fieldset => *p = sanitize_path(p), 18 | NameKind::Enum => *p = sanitize_path(p), 19 | NameKind::BlockItem => *p = p.to_sanitized_snake_case().to_string(), 20 | NameKind::Field => *p = p.to_sanitized_snake_case().to_string(), 21 | NameKind::EnumVariant => *p = p.to_sanitized_constant_case().to_string(), 22 | }); 23 | Ok(()) 24 | } 25 | } 26 | 27 | fn sanitize_path(p: &str) -> String { 28 | let v = p.split("::").collect::>(); 29 | let len = v.len(); 30 | v.into_iter() 31 | .enumerate() 32 | .map(|(i, s)| { 33 | if i == len - 1 { 34 | s.to_sanitized_pascal_case() 35 | } else { 36 | s.to_sanitized_snake_case() 37 | } 38 | }) 39 | .collect::>() 40 | .join("::") 41 | } 42 | -------------------------------------------------------------------------------- /src/transform/sort.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use crate::ir::*; 4 | 5 | #[derive(Debug, Serialize, Deserialize)] 6 | pub struct Sort {} 7 | 8 | impl Sort { 9 | pub fn run(&self, ir: &mut IR) -> anyhow::Result<()> { 10 | for z in ir.blocks.values_mut() { 11 | z.items.sort_by_key(|i| (i.byte_offset, i.name.clone())) 12 | } 13 | for z in ir.fieldsets.values_mut() { 14 | z.fields 15 | .sort_by_key(|i| (i.bit_offset.clone(), i.name.clone())) 16 | } 17 | for z in ir.enums.values_mut() { 18 | z.variants.sort_by_key(|i| (i.value, i.name.clone())) 19 | } 20 | 21 | Ok(()) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/util.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{anyhow, Result}; 2 | use inflections::Inflect; 3 | use proc_macro2::{Ident, Literal, Span, TokenStream}; 4 | use quote::{quote, ToTokens}; 5 | use 
std::str::FromStr; 6 | 7 | pub const BITS_PER_BYTE: u32 = 8; 8 | 9 | /// List of chars that some vendors use in their peripheral/field names but 10 | /// that are not valid in Rust ident 11 | const INVALID_CHARS: &[char] = &['(', ')', '[', ']', '/', ' ', '-']; 12 | 13 | static KEYWORDS: &[&str] = &[ 14 | "abstract", "as", "async", "await", "become", "box", "break", "const", "continue", "crate", 15 | "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", 16 | "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv", "pub", "ref", 17 | "return", "self", "Self", "static", "struct", "super", "trait", "true", "try", "type", 18 | "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", 19 | ]; 20 | 21 | /// Make `s` a valid identifier, making the minimal changes (no case changes) 22 | fn sanitize_ident(s: String) -> String { 23 | let mut s = s.replace(INVALID_CHARS, ""); 24 | if KEYWORDS.contains(&&*s) { 25 | s.push('_'); 26 | s 27 | } else if s.starts_with(char::is_numeric) { 28 | format!("_{}", s) 29 | } else { 30 | s 31 | } 32 | } 33 | 34 | pub trait StringExt { 35 | fn to_sanitized_pascal_case(&self) -> String; 36 | fn to_sanitized_upper_case(&self) -> String; 37 | fn to_sanitized_constant_case(&self) -> String; 38 | fn to_sanitized_snake_case(&self) -> String; 39 | } 40 | 41 | impl StringExt for str { 42 | fn to_sanitized_snake_case(&self) -> String { 43 | sanitize_ident(self.to_snake_case()) 44 | } 45 | 46 | fn to_sanitized_upper_case(&self) -> String { 47 | sanitize_ident(self.to_upper_case()) 48 | } 49 | 50 | fn to_sanitized_constant_case(&self) -> String { 51 | sanitize_ident(self.to_constant_case()) 52 | } 53 | 54 | fn to_sanitized_pascal_case(&self) -> String { 55 | sanitize_ident(self.to_pascal_case()) 56 | } 57 | } 58 | 59 | pub fn respace(s: &str) -> String { 60 | s.split_whitespace().collect::>().join(" ") 61 | } 62 | 63 | pub fn escape_brackets(s: &str) -> String { 64 | s.split('[') 65 
/// Escapes `[` and `]` with backslashes so the text is rendered literally in
/// rustdoc instead of being parsed as an intra-doc link. Brackets that are
/// already escaped (preceded by `\`) are left alone.
pub fn escape_brackets(s: &str) -> String {
    // Re-joins pieces produced by splitting on a bracket, inserting the
    // escaped bracket unless the previous chunk already ends with `\`.
    fn rejoin(acc: String, piece: &str, escaped: &str, raw: &str) -> String {
        if acc.is_empty() {
            piece.to_string()
        } else if acc.ends_with('\\') {
            acc + raw + piece
        } else {
            acc + escaped + piece
        }
    }

    let opened = s
        .split('[')
        .fold(String::new(), |acc, piece| rejoin(acc, piece, "\\[", "["));
    opened
        .split(']')
        .fold(String::new(), |acc, piece| rejoin(acc, piece, "\\]", "]"))
}

/// Substitutes `suffix` for the `[%s]` (preferred) or `%s` placeholder.
pub fn replace_suffix(name: &str, suffix: &str) -> String {
    if name.contains("[%s]") {
        name.replace("[%s]", suffix)
    } else {
        name.replace("%s", suffix)
    }
}

/// Formats `n` as a `_`-separated hex literal, dropping leading zero groups
/// (e.g. `0x0001_2345`, `0x42`, `0x0`).
pub fn hex_str(n: u64) -> String {
    let h4 = (n >> 48) & 0xffff;
    let h3 = (n >> 32) & 0xffff;
    let h2 = (n >> 16) & 0xffff;
    let h1 = n & 0xffff;

    if h4 != 0 {
        format!("0x{h4:04x}_{h3:04x}_{h2:04x}_{h1:04x}")
    } else if h3 != 0 {
        format!("0x{h3:04x}_{h2:04x}_{h1:04x}")
    } else if h2 != 0 {
        format!("0x{h2:04x}_{h1:04x}")
    } else if h1 & 0xff00 != 0 {
        format!("0x{h1:04x}")
    } else if h1 != 0 {
        format!("0x{:02x}", h1 & 0xff)
    } else {
        "0x0".to_string()
    }
}
Result; 141 | fn to_ty_width(&self) -> Result; 142 | } 143 | 144 | impl U32Ext for u32 { 145 | fn to_ty(&self) -> Result { 146 | Ok(Ident::new( 147 | match *self { 148 | 1 => "bool", 149 | 2..=8 => "u8", 150 | 9..=16 => "u16", 151 | 17..=32 => "u32", 152 | 33..=64 => "u64", 153 | _ => { 154 | return Err(anyhow!( 155 | "can't convert {} bits into a Rust integral type", 156 | *self 157 | )) 158 | } 159 | }, 160 | Span::call_site(), 161 | )) 162 | } 163 | 164 | fn to_ty_width(&self) -> Result { 165 | Ok(match *self { 166 | 1 => 1, 167 | 2..=8 => 8, 168 | 9..=16 => 16, 169 | 17..=32 => 32, 170 | 33..=64 => 64, 171 | _ => { 172 | return Err(anyhow!( 173 | "can't convert {} bits into a Rust integral type width", 174 | *self 175 | )) 176 | } 177 | }) 178 | } 179 | } 180 | 181 | pub fn build_rs() -> TokenStream { 182 | quote! { 183 | use std::env; 184 | use std::fs::File; 185 | use std::io::Write; 186 | use std::path::PathBuf; 187 | 188 | fn main() { 189 | if env::var_os("CARGO_FEATURE_RT").is_some() { 190 | // Put the linker script somewhere the linker can find it 191 | let out = &PathBuf::from(env::var_os("OUT_DIR").unwrap()); 192 | File::create(out.join("device.x")) 193 | .unwrap() 194 | .write_all(include_bytes!("device.x")) 195 | .unwrap(); 196 | println!("cargo:rustc-link-search={}", out.display()); 197 | 198 | println!("cargo:rerun-if-changed=device.x"); 199 | } 200 | 201 | println!("cargo:rerun-if-changed=build.rs"); 202 | } 203 | } 204 | } 205 | 206 | /// Return a relative path to access a from b. 
207 | pub fn relative_path(a: &str, b: &str) -> TokenStream { 208 | let a: Vec<&str> = a.split("::").collect(); 209 | let b: Vec<&str> = b.split("::").collect(); 210 | 211 | let mut ma = &a[..a.len() - 1]; 212 | let mut mb = &b[..b.len() - 1]; 213 | while !ma.is_empty() && !mb.is_empty() && ma[0] == mb[0] { 214 | ma = &ma[1..]; 215 | mb = &mb[1..]; 216 | } 217 | 218 | let mut res = TokenStream::new(); 219 | 220 | // for each item left in b, append a `super` 221 | for _ in mb { 222 | res.extend(quote!(super::)); 223 | } 224 | 225 | // for each item in a, append it 226 | for ident in ma { 227 | let ident = Ident::new(ident, Span::call_site()); 228 | res.extend(quote!(#ident::)); 229 | } 230 | 231 | let ident = Ident::new(a[a.len() - 1], Span::call_site()); 232 | res.extend(quote!(#ident)); 233 | 234 | res 235 | } 236 | 237 | pub fn doc(doc: &Option) -> TokenStream { 238 | if let Some(doc) = doc { 239 | let doc = doc.replace("\\n", "\n"); 240 | let doc = respace(&doc); 241 | let doc = escape_brackets(&doc); 242 | quote!(#[doc=#doc]) 243 | } else { 244 | quote!() 245 | } 246 | } 247 | -------------------------------------------------------------------------------- /src/validate.rs: -------------------------------------------------------------------------------- 1 | use std::{cmp::Ordering, collections::BTreeSet}; 2 | 3 | use crate::ir::{BitOffset, BlockItemInner, IR}; 4 | 5 | #[derive(Debug, Clone)] 6 | pub struct Options { 7 | pub allow_register_overlap: bool, 8 | pub allow_field_overlap: bool, 9 | pub allow_enum_dup_value: bool, 10 | pub allow_unused_enums: bool, 11 | pub allow_unused_fieldsets: bool, 12 | } 13 | 14 | pub fn validate(ir: &IR, options: Options) -> Vec { 15 | let mut errs = Vec::new(); 16 | 17 | let mut used_fieldsets = BTreeSet::new(); 18 | let mut used_enums = BTreeSet::new(); 19 | 20 | for (bname, b) in &ir.blocks { 21 | if let Some(n) = &b.extends { 22 | if !ir.blocks.contains_key(n) { 23 | errs.push(format!( 24 | "block {}: extends block {} does 
not exist", 25 | bname, n 26 | )) 27 | } 28 | } 29 | 30 | for bi in &b.items { 31 | match &bi.inner { 32 | BlockItemInner::Block(i) => { 33 | if !ir.blocks.contains_key(&i.block) { 34 | errs.push(format!( 35 | "block {} item {}: block {} does not exist", 36 | bname, bi.name, i.block 37 | )) 38 | } 39 | } 40 | BlockItemInner::Register(i) => { 41 | if let Some(fs) = &i.fieldset { 42 | used_fieldsets.insert(fs.clone()); 43 | if !ir.fieldsets.contains_key(fs) { 44 | errs.push(format!( 45 | "block {} item {}: fieldset {} does not exist", 46 | bname, bi.name, fs 47 | )) 48 | } 49 | } 50 | } 51 | } 52 | } 53 | 54 | if !options.allow_register_overlap { 55 | for (i1, i2) in Pairs::new(b.items.iter()) { 56 | if i1.byte_offset == i2.byte_offset { 57 | errs.push(format!( 58 | "block {}: registers overlap: {} {}", 59 | bname, i1.name, i2.name 60 | )); 61 | } 62 | } 63 | } 64 | } 65 | 66 | for (fsname, fs) in &ir.fieldsets { 67 | if let Some(n) = &fs.extends { 68 | used_fieldsets.insert(n.clone()); 69 | if !ir.fieldsets.contains_key(n) { 70 | errs.push(format!( 71 | "fieldset {}: extends fieldset {} does not exist", 72 | fsname, n 73 | )) 74 | } 75 | } 76 | } 77 | 78 | for (fsname, fs) in &ir.fieldsets { 79 | if !options.allow_unused_fieldsets && !used_fieldsets.contains(fsname) { 80 | errs.push(format!("fieldset {} is unused", fsname)); 81 | } 82 | 83 | 'FIELD: for f in &fs.fields { 84 | if let Some(ename) = &f.enumm { 85 | used_enums.insert(ename.clone()); 86 | 87 | let Some(e) = ir.enums.get(ename) else { 88 | errs.push(format!( 89 | "fieldset {} field {}: enum {} does not exist", 90 | fsname, f.name, ename 91 | )); 92 | continue; 93 | }; 94 | 95 | // do extra check when bit_offset is in "range mode" 96 | if let BitOffset::Cursed(ranges) = &f.bit_offset { 97 | let mut last_max_index = 0; 98 | let mut ranges_size = 0; 99 | for (index, range) in ranges.iter().enumerate() { 100 | // every "range" shouldn't be empty (aka start > end) 101 | if range.is_empty() { 102 | 
errs.push(format!( 103 | "fieldset {} field {}: end value of bit_offset is bigger than start value", 104 | fsname, f.name, 105 | )); 106 | continue 'FIELD; 107 | } 108 | 109 | // "range"s of same field shouldn't overlap 110 | if index > 0 { 111 | match range.start().cmp(&last_max_index) { 112 | Ordering::Less => { 113 | errs.push(format!( 114 | "fieldset {} field {}: bit_offset is overlapped with itself", 115 | fsname, f.name, 116 | )); 117 | continue 'FIELD; 118 | } 119 | Ordering::Equal => { 120 | errs.push(format!( 121 | "fieldset {} field {}: bit_offset has continuous part, should be merged", 122 | fsname, f.name, 123 | )); 124 | continue 'FIELD; 125 | } 126 | Ordering::Greater => last_max_index = *range.end(), 127 | } 128 | } 129 | ranges_size += range.end() - range.start() + 1; 130 | } 131 | 132 | // bit size from "ranges" should be the same as field bit_size 133 | if ranges_size != f.bit_size { 134 | errs.push(format!( 135 | "fieldset {} field {}: size of bit_offset ranges is mismatch with field bit_size", 136 | fsname, f.name, 137 | )); 138 | continue; 139 | } 140 | } 141 | 142 | if f.bit_size != e.bit_size { 143 | errs.push(format!( 144 | "fieldset {} field {}: bit_size {} does not match enum {} bit_size {}", 145 | fsname, f.name, f.bit_size, ename, e.bit_size 146 | )); 147 | } 148 | } 149 | } 150 | 151 | if !options.allow_field_overlap { 152 | for (i1, i2) in Pairs::new(fs.fields.iter()) { 153 | // expand every BitOffset to a Vec, 154 | // and compare at that level 155 | 'COMPARE: for i1_range in i1.bit_offset.clone().into_ranges(i1.bit_size) { 156 | for i2_range in i2.bit_offset.clone().into_ranges(i2.bit_size) { 157 | if i2_range.end() > i1_range.start() && i1_range.end() > i2_range.start() { 158 | errs.push(format!( 159 | "fieldset {}: fields overlap: {} {}", 160 | fsname, i1.name, i2.name 161 | )); 162 | break 'COMPARE; 163 | } 164 | } 165 | } 166 | } 167 | } 168 | } 169 | 170 | for (ename, e) in &ir.enums { 171 | if !options.allow_unused_enums && 
!used_enums.contains(ename) { 172 | errs.push(format!("enum {} is unused", ename)); 173 | } 174 | 175 | let maxval = 1 << e.bit_size; 176 | for v in &e.variants { 177 | if v.value >= maxval { 178 | errs.push(format!( 179 | "enum {} variant {}: value {} is not less than than max 1<<{} = {}", 180 | ename, v.name, v.value, e.bit_size, maxval, 181 | )); 182 | } 183 | } 184 | 185 | if !options.allow_enum_dup_value { 186 | for (i1, i2) in Pairs::new(e.variants.iter()) { 187 | if i1.value == i2.value { 188 | errs.push(format!( 189 | "enum {}: variants with same value: {} {}", 190 | ename, i1.name, i2.name 191 | )); 192 | } 193 | } 194 | } 195 | } 196 | 197 | errs 198 | } 199 | 200 | // ============== 201 | 202 | struct Pairs { 203 | head: Option, 204 | tail: U, 205 | next: U, 206 | } 207 | 208 | impl Pairs { 209 | fn new(mut iter: U) -> Self { 210 | let head = iter.next(); 211 | Pairs { 212 | head, 213 | tail: iter.clone(), 214 | next: iter, 215 | } 216 | } 217 | } 218 | 219 | impl Iterator for Pairs 220 | where 221 | U::Item: Clone, 222 | { 223 | type Item = (U::Item, U::Item); 224 | 225 | fn next(&mut self) -> Option { 226 | let a = self.head.as_ref()?.clone(); 227 | 228 | if let Some(b) = self.tail.next() { 229 | return Some((a, b)); 230 | } 231 | 232 | match self.next.next() { 233 | Some(new_head) => { 234 | self.head = Some(new_head); 235 | self.tail = self.next.clone(); 236 | self.next() 237 | } 238 | None => None, 239 | } 240 | } 241 | } 242 | --------------------------------------------------------------------------------