├── .envrc ├── .gitignore ├── .replit ├── Cargo.lock ├── Cargo.toml ├── bin └── flurry │ ├── Cargo.toml │ └── src │ └── main.rs ├── examples ├── hello.sf └── hello.sf.conf ├── lib ├── fractal │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── parser │ ├── Cargo.toml │ ├── build.rs │ ├── readme.md │ └── src │ │ ├── ast.rs │ │ ├── indentation.rs │ │ ├── lexer.rs │ │ ├── lib.rs │ │ ├── snowflake.lalrpop │ │ └── token.rs └── tag │ ├── Cargo.toml │ └── src │ └── lib.rs ├── nix ├── default.nix ├── sources.json └── sources.nix ├── readme.md └── shell.nix /.envrc: -------------------------------------------------------------------------------- 1 | eval "$(lorri direnv)" 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # These are backup files generated by rustfmt 7 | **/*.rs.bk -------------------------------------------------------------------------------- /.replit: -------------------------------------------------------------------------------- 1 | run = "bash -c \"[ -d ~/.cargo ] || bash <(curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs) -y --default-toolchain nightly; ~/.cargo/bin/cargo build; target/debug/flurry examples/hello.sf examples/hello.sf.conf\"" 2 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | [[package]] 4 | name = "aho-corasick" 5 | version = "0.7.13" 6 | source = "registry+https://github.com/rust-lang/crates.io-index" 7 | checksum = "043164d8ba5c4c3035fec9bbee8647c0261d788f3474306f93bb65901cae0e86" 8 | dependencies = [ 9 | "memchr", 10 | ] 11 | 12 | [[package]] 13 | name = "arrayref" 14 | version = "0.3.6" 15 | source = "registry+https://github.com/rust-lang/crates.io-index" 16 | checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" 17 | 18 | [[package]] 19 | name = "arrayvec" 20 | version = "0.5.1" 21 | source = "registry+https://github.com/rust-lang/crates.io-index" 22 | checksum = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" 23 | 24 | [[package]] 25 | name = "ascii-canvas" 26 | version = "2.0.0" 27 | source = "registry+https://github.com/rust-lang/crates.io-index" 28 | checksum = "ff8eb72df928aafb99fe5d37b383f2fe25bd2a765e3e5f7c365916b6f2463a29" 29 | dependencies = [ 30 | "term", 31 | ] 32 | 33 | [[package]] 34 | name = "atty" 35 | version = "0.2.14" 36 | source = "registry+https://github.com/rust-lang/crates.io-index" 37 | checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" 38 | dependencies = [ 39 | "hermit-abi", 40 | "libc", 41 | "winapi", 42 | ] 43 | 44 | [[package]] 45 | name = "autocfg" 46 | version = "1.0.0" 47 | source = "registry+https://github.com/rust-lang/crates.io-index" 48 | checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" 49 | 50 | [[package]] 51 | name = "base64" 52 | version = "0.11.0" 53 | source = "registry+https://github.com/rust-lang/crates.io-index" 54 | checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" 55 | 56 | [[package]] 57 | name = "beef" 58 | version = "0.4.4" 59 | source = "registry+https://github.com/rust-lang/crates.io-index" 60 | checksum = "474a626a67200bd107d44179bb3d4fc61891172d11696609264589be6a0e6a43" 61 | 62 | [[package]] 63 | name = "bit-set" 64 | version = "0.5.2" 65 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 66 | checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" 67 | dependencies = [ 68 | "bit-vec", 69 | ] 70 | 71 | [[package]] 72 | name = "bit-vec" 73 | version = "0.6.2" 74 | source = "registry+https://github.com/rust-lang/crates.io-index" 75 | checksum = "5f0dc55f2d8a1a85650ac47858bb001b4c0dd73d79e3c455a842925e68d29cd3" 76 | 77 | [[package]] 78 | name = "blake2b_simd" 79 | version = "0.5.10" 80 | source = "registry+https://github.com/rust-lang/crates.io-index" 81 | checksum = "d8fb2d74254a3a0b5cac33ac9f8ed0e44aa50378d9dbb2e5d83bd21ed1dc2c8a" 82 | dependencies = [ 83 | "arrayref", 84 | "arrayvec", 85 | "constant_time_eq", 86 | ] 87 | 88 | [[package]] 89 | name = "block-buffer" 90 | version = "0.7.3" 91 | source = "registry+https://github.com/rust-lang/crates.io-index" 92 | checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" 93 | dependencies = [ 94 | "block-padding", 95 | "byte-tools", 96 | "byteorder", 97 | "generic-array", 98 | ] 99 | 100 | [[package]] 101 | name = "block-padding" 102 | version = "0.1.5" 103 | source = "registry+https://github.com/rust-lang/crates.io-index" 104 | checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" 105 | dependencies = [ 106 | "byte-tools", 107 | ] 108 | 109 | [[package]] 110 | name = "byte-tools" 111 | version = "0.3.1" 112 | source = "registry+https://github.com/rust-lang/crates.io-index" 113 | checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" 114 | 115 | [[package]] 116 | name = "byteorder" 117 | version = "1.3.4" 118 | source = "registry+https://github.com/rust-lang/crates.io-index" 119 | checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" 120 | 121 | [[package]] 122 | name = "cfg-if" 123 | version = "0.1.10" 124 | source = "registry+https://github.com/rust-lang/crates.io-index" 125 | checksum = 
"4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" 126 | 127 | [[package]] 128 | name = "constant_time_eq" 129 | version = "0.1.5" 130 | source = "registry+https://github.com/rust-lang/crates.io-index" 131 | checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" 132 | 133 | [[package]] 134 | name = "crossbeam-utils" 135 | version = "0.7.2" 136 | source = "registry+https://github.com/rust-lang/crates.io-index" 137 | checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" 138 | dependencies = [ 139 | "autocfg", 140 | "cfg-if", 141 | "lazy_static", 142 | ] 143 | 144 | [[package]] 145 | name = "diff" 146 | version = "0.1.12" 147 | source = "registry+https://github.com/rust-lang/crates.io-index" 148 | checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" 149 | 150 | [[package]] 151 | name = "digest" 152 | version = "0.8.1" 153 | source = "registry+https://github.com/rust-lang/crates.io-index" 154 | checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" 155 | dependencies = [ 156 | "generic-array", 157 | ] 158 | 159 | [[package]] 160 | name = "dirs" 161 | version = "1.0.5" 162 | source = "registry+https://github.com/rust-lang/crates.io-index" 163 | checksum = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" 164 | dependencies = [ 165 | "libc", 166 | "redox_users", 167 | "winapi", 168 | ] 169 | 170 | [[package]] 171 | name = "docopt" 172 | version = "1.1.0" 173 | source = "registry+https://github.com/rust-lang/crates.io-index" 174 | checksum = "7f525a586d310c87df72ebcd98009e57f1cc030c8c268305287a476beb653969" 175 | dependencies = [ 176 | "lazy_static", 177 | "regex", 178 | "serde", 179 | "strsim", 180 | ] 181 | 182 | [[package]] 183 | name = "either" 184 | version = "1.6.0" 185 | source = "registry+https://github.com/rust-lang/crates.io-index" 186 | checksum = "cd56b59865bce947ac5958779cfa508f6c3b9497cc762b7e24a12d11ccde2c4f" 187 | 188 | 
[[package]] 189 | name = "ena" 190 | version = "0.14.0" 191 | source = "registry+https://github.com/rust-lang/crates.io-index" 192 | checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3" 193 | dependencies = [ 194 | "log", 195 | ] 196 | 197 | [[package]] 198 | name = "fake-simd" 199 | version = "0.1.2" 200 | source = "registry+https://github.com/rust-lang/crates.io-index" 201 | checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" 202 | 203 | [[package]] 204 | name = "fixedbitset" 205 | version = "0.2.0" 206 | source = "registry+https://github.com/rust-lang/crates.io-index" 207 | checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" 208 | 209 | [[package]] 210 | name = "flurry" 211 | version = "0.1.0" 212 | dependencies = [ 213 | "fractal", 214 | "parser", 215 | "tag", 216 | ] 217 | 218 | [[package]] 219 | name = "fnv" 220 | version = "1.0.7" 221 | source = "registry+https://github.com/rust-lang/crates.io-index" 222 | checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 223 | 224 | [[package]] 225 | name = "fractal" 226 | version = "0.1.0" 227 | dependencies = [ 228 | "num-bigint", 229 | "parser", 230 | "serde", 231 | "tag", 232 | "thiserror", 233 | ] 234 | 235 | [[package]] 236 | name = "generic-array" 237 | version = "0.12.3" 238 | source = "registry+https://github.com/rust-lang/crates.io-index" 239 | checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" 240 | dependencies = [ 241 | "typenum", 242 | ] 243 | 244 | [[package]] 245 | name = "getrandom" 246 | version = "0.1.14" 247 | source = "registry+https://github.com/rust-lang/crates.io-index" 248 | checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb" 249 | dependencies = [ 250 | "cfg-if", 251 | "libc", 252 | "wasi", 253 | ] 254 | 255 | [[package]] 256 | name = "hashbrown" 257 | version = "0.8.2" 258 | source = "registry+https://github.com/rust-lang/crates.io-index" 259 
| checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25" 260 | dependencies = [ 261 | "autocfg", 262 | ] 263 | 264 | [[package]] 265 | name = "hermit-abi" 266 | version = "0.1.15" 267 | source = "registry+https://github.com/rust-lang/crates.io-index" 268 | checksum = "3deed196b6e7f9e44a2ae8d94225d80302d81208b1bb673fd21fe634645c85a9" 269 | dependencies = [ 270 | "libc", 271 | ] 272 | 273 | [[package]] 274 | name = "id-arena" 275 | version = "2.2.1" 276 | source = "registry+https://github.com/rust-lang/crates.io-index" 277 | checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" 278 | 279 | [[package]] 280 | name = "indexmap" 281 | version = "1.5.1" 282 | source = "registry+https://github.com/rust-lang/crates.io-index" 283 | checksum = "86b45e59b16c76b11bf9738fd5d38879d3bd28ad292d7b313608becb17ae2df9" 284 | dependencies = [ 285 | "autocfg", 286 | "hashbrown", 287 | ] 288 | 289 | [[package]] 290 | name = "indoc" 291 | version = "1.0.2" 292 | source = "registry+https://github.com/rust-lang/crates.io-index" 293 | checksum = "644defcefee68d7805653a682e99a2e2a5014a1fc3cc9be7059a215844eeea6f" 294 | dependencies = [ 295 | "unindent", 296 | ] 297 | 298 | [[package]] 299 | name = "itertools" 300 | version = "0.9.0" 301 | source = "registry+https://github.com/rust-lang/crates.io-index" 302 | checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" 303 | dependencies = [ 304 | "either", 305 | ] 306 | 307 | [[package]] 308 | name = "lalrpop" 309 | version = "0.19.0" 310 | source = "registry+https://github.com/rust-lang/crates.io-index" 311 | checksum = "d6f55673d283313791404be21209bb433f128f7e5c451986df107eb5fdbd68d2" 312 | dependencies = [ 313 | "ascii-canvas", 314 | "atty", 315 | "bit-set", 316 | "diff", 317 | "docopt", 318 | "ena", 319 | "itertools", 320 | "lalrpop-util", 321 | "petgraph", 322 | "regex", 323 | "regex-syntax", 324 | "serde", 325 | "serde_derive", 326 | "sha2", 327 | "string_cache", 328 | 
"term", 329 | "unicode-xid", 330 | ] 331 | 332 | [[package]] 333 | name = "lalrpop-util" 334 | version = "0.19.0" 335 | source = "registry+https://github.com/rust-lang/crates.io-index" 336 | checksum = "f7e88f15a7d31dfa8fb607986819039127f0161058a3b248a146142d276cbd28" 337 | 338 | [[package]] 339 | name = "lazy_static" 340 | version = "1.4.0" 341 | source = "registry+https://github.com/rust-lang/crates.io-index" 342 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 343 | 344 | [[package]] 345 | name = "libc" 346 | version = "0.2.74" 347 | source = "registry+https://github.com/rust-lang/crates.io-index" 348 | checksum = "a2f02823cf78b754822df5f7f268fb59822e7296276d3e069d8e8cb26a14bd10" 349 | 350 | [[package]] 351 | name = "log" 352 | version = "0.4.11" 353 | source = "registry+https://github.com/rust-lang/crates.io-index" 354 | checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" 355 | dependencies = [ 356 | "cfg-if", 357 | ] 358 | 359 | [[package]] 360 | name = "logos" 361 | version = "0.11.4" 362 | source = "registry+https://github.com/rust-lang/crates.io-index" 363 | checksum = "b91c49573597a5d6c094f9031617bb1fed15c0db68c81e6546d313414ce107e4" 364 | dependencies = [ 365 | "logos-derive", 366 | ] 367 | 368 | [[package]] 369 | name = "logos-derive" 370 | version = "0.11.5" 371 | source = "registry+https://github.com/rust-lang/crates.io-index" 372 | checksum = "797b1f8a0571b331c1b47e7db245af3dc634838da7a92b3bef4e30376ae1c347" 373 | dependencies = [ 374 | "beef", 375 | "fnv", 376 | "proc-macro2", 377 | "quote", 378 | "regex-syntax", 379 | "syn", 380 | "utf8-ranges", 381 | ] 382 | 383 | [[package]] 384 | name = "memchr" 385 | version = "2.3.3" 386 | source = "registry+https://github.com/rust-lang/crates.io-index" 387 | checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" 388 | 389 | [[package]] 390 | name = "new_debug_unreachable" 391 | version = "1.0.4" 392 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 393 | checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" 394 | 395 | [[package]] 396 | name = "num-bigint" 397 | version = "0.3.0" 398 | source = "registry+https://github.com/rust-lang/crates.io-index" 399 | checksum = "b7f3fc75e3697059fb1bc465e3d8cca6cf92f56854f201158b3f9c77d5a3cfa0" 400 | dependencies = [ 401 | "autocfg", 402 | "num-integer", 403 | "num-traits", 404 | ] 405 | 406 | [[package]] 407 | name = "num-integer" 408 | version = "0.1.43" 409 | source = "registry+https://github.com/rust-lang/crates.io-index" 410 | checksum = "8d59457e662d541ba17869cf51cf177c0b5f0cbf476c66bdc90bf1edac4f875b" 411 | dependencies = [ 412 | "autocfg", 413 | "num-traits", 414 | ] 415 | 416 | [[package]] 417 | name = "num-traits" 418 | version = "0.2.12" 419 | source = "registry+https://github.com/rust-lang/crates.io-index" 420 | checksum = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611" 421 | dependencies = [ 422 | "autocfg", 423 | ] 424 | 425 | [[package]] 426 | name = "opaque-debug" 427 | version = "0.2.3" 428 | source = "registry+https://github.com/rust-lang/crates.io-index" 429 | checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" 430 | 431 | [[package]] 432 | name = "parser" 433 | version = "0.1.0" 434 | dependencies = [ 435 | "indoc", 436 | "lalrpop", 437 | "lalrpop-util", 438 | "logos", 439 | "num-bigint", 440 | "regex", 441 | ] 442 | 443 | [[package]] 444 | name = "petgraph" 445 | version = "0.5.1" 446 | source = "registry+https://github.com/rust-lang/crates.io-index" 447 | checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" 448 | dependencies = [ 449 | "fixedbitset", 450 | "indexmap", 451 | ] 452 | 453 | [[package]] 454 | name = "phf_shared" 455 | version = "0.8.0" 456 | source = "registry+https://github.com/rust-lang/crates.io-index" 457 | checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" 
458 | dependencies = [ 459 | "siphasher", 460 | ] 461 | 462 | [[package]] 463 | name = "precomputed-hash" 464 | version = "0.1.1" 465 | source = "registry+https://github.com/rust-lang/crates.io-index" 466 | checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" 467 | 468 | [[package]] 469 | name = "proc-macro2" 470 | version = "1.0.19" 471 | source = "registry+https://github.com/rust-lang/crates.io-index" 472 | checksum = "04f5f085b5d71e2188cb8271e5da0161ad52c3f227a661a3c135fdf28e258b12" 473 | dependencies = [ 474 | "unicode-xid", 475 | ] 476 | 477 | [[package]] 478 | name = "quote" 479 | version = "1.0.7" 480 | source = "registry+https://github.com/rust-lang/crates.io-index" 481 | checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37" 482 | dependencies = [ 483 | "proc-macro2", 484 | ] 485 | 486 | [[package]] 487 | name = "redox_syscall" 488 | version = "0.1.57" 489 | source = "registry+https://github.com/rust-lang/crates.io-index" 490 | checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" 491 | 492 | [[package]] 493 | name = "redox_users" 494 | version = "0.3.4" 495 | source = "registry+https://github.com/rust-lang/crates.io-index" 496 | checksum = "09b23093265f8d200fa7b4c2c76297f47e681c655f6f1285a8780d6a022f7431" 497 | dependencies = [ 498 | "getrandom", 499 | "redox_syscall", 500 | "rust-argon2", 501 | ] 502 | 503 | [[package]] 504 | name = "regex" 505 | version = "1.3.9" 506 | source = "registry+https://github.com/rust-lang/crates.io-index" 507 | checksum = "9c3780fcf44b193bc4d09f36d2a3c87b251da4a046c87795a0d35f4f927ad8e6" 508 | dependencies = [ 509 | "aho-corasick", 510 | "memchr", 511 | "regex-syntax", 512 | "thread_local", 513 | ] 514 | 515 | [[package]] 516 | name = "regex-syntax" 517 | version = "0.6.18" 518 | source = "registry+https://github.com/rust-lang/crates.io-index" 519 | checksum = "26412eb97c6b088a6997e05f69403a802a92d520de2f8e63c2b65f9e0f47c4e8" 520 | 521 | [[package]] 522 | 
name = "rust-argon2" 523 | version = "0.7.0" 524 | source = "registry+https://github.com/rust-lang/crates.io-index" 525 | checksum = "2bc8af4bda8e1ff4932523b94d3dd20ee30a87232323eda55903ffd71d2fb017" 526 | dependencies = [ 527 | "base64", 528 | "blake2b_simd", 529 | "constant_time_eq", 530 | "crossbeam-utils", 531 | ] 532 | 533 | [[package]] 534 | name = "sdset" 535 | version = "0.4.0" 536 | source = "registry+https://github.com/rust-lang/crates.io-index" 537 | checksum = "cbb21fe0588557792176c89bc7b943027b14f346d03c6be6a199c2860277d93a" 538 | 539 | [[package]] 540 | name = "serde" 541 | version = "1.0.115" 542 | source = "registry+https://github.com/rust-lang/crates.io-index" 543 | checksum = "e54c9a88f2da7238af84b5101443f0c0d0a3bbdc455e34a5c9497b1903ed55d5" 544 | dependencies = [ 545 | "serde_derive", 546 | ] 547 | 548 | [[package]] 549 | name = "serde_derive" 550 | version = "1.0.115" 551 | source = "registry+https://github.com/rust-lang/crates.io-index" 552 | checksum = "609feed1d0a73cc36a0182a840a9b37b4a82f0b1150369f0536a9e3f2a31dc48" 553 | dependencies = [ 554 | "proc-macro2", 555 | "quote", 556 | "syn", 557 | ] 558 | 559 | [[package]] 560 | name = "sha2" 561 | version = "0.8.2" 562 | source = "registry+https://github.com/rust-lang/crates.io-index" 563 | checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" 564 | dependencies = [ 565 | "block-buffer", 566 | "digest", 567 | "fake-simd", 568 | "opaque-debug", 569 | ] 570 | 571 | [[package]] 572 | name = "siphasher" 573 | version = "0.3.3" 574 | source = "registry+https://github.com/rust-lang/crates.io-index" 575 | checksum = "fa8f3741c7372e75519bd9346068370c9cdaabcc1f9599cbcf2a2719352286b7" 576 | 577 | [[package]] 578 | name = "string_cache" 579 | version = "0.8.0" 580 | source = "registry+https://github.com/rust-lang/crates.io-index" 581 | checksum = "2940c75beb4e3bf3a494cef919a747a2cb81e52571e212bfbd185074add7208a" 582 | dependencies = [ 583 | "lazy_static", 584 | 
"new_debug_unreachable", 585 | "phf_shared", 586 | "precomputed-hash", 587 | "serde", 588 | ] 589 | 590 | [[package]] 591 | name = "strsim" 592 | version = "0.9.3" 593 | source = "registry+https://github.com/rust-lang/crates.io-index" 594 | checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c" 595 | 596 | [[package]] 597 | name = "syn" 598 | version = "1.0.38" 599 | source = "registry+https://github.com/rust-lang/crates.io-index" 600 | checksum = "e69abc24912995b3038597a7a593be5053eb0fb44f3cc5beec0deb421790c1f4" 601 | dependencies = [ 602 | "proc-macro2", 603 | "quote", 604 | "unicode-xid", 605 | ] 606 | 607 | [[package]] 608 | name = "tag" 609 | version = "0.1.0" 610 | dependencies = [ 611 | "id-arena", 612 | "sdset", 613 | "thiserror", 614 | ] 615 | 616 | [[package]] 617 | name = "term" 618 | version = "0.5.2" 619 | source = "registry+https://github.com/rust-lang/crates.io-index" 620 | checksum = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" 621 | dependencies = [ 622 | "byteorder", 623 | "dirs", 624 | "winapi", 625 | ] 626 | 627 | [[package]] 628 | name = "thiserror" 629 | version = "1.0.20" 630 | source = "registry+https://github.com/rust-lang/crates.io-index" 631 | checksum = "7dfdd070ccd8ccb78f4ad66bf1982dc37f620ef696c6b5028fe2ed83dd3d0d08" 632 | dependencies = [ 633 | "thiserror-impl", 634 | ] 635 | 636 | [[package]] 637 | name = "thiserror-impl" 638 | version = "1.0.20" 639 | source = "registry+https://github.com/rust-lang/crates.io-index" 640 | checksum = "bd80fc12f73063ac132ac92aceea36734f04a1d93c1240c6944e23a3b8841793" 641 | dependencies = [ 642 | "proc-macro2", 643 | "quote", 644 | "syn", 645 | ] 646 | 647 | [[package]] 648 | name = "thread_local" 649 | version = "1.0.1" 650 | source = "registry+https://github.com/rust-lang/crates.io-index" 651 | checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" 652 | dependencies = [ 653 | "lazy_static", 654 | ] 655 | 656 | [[package]] 657 | name 
= "typenum" 658 | version = "1.12.0" 659 | source = "registry+https://github.com/rust-lang/crates.io-index" 660 | checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" 661 | 662 | [[package]] 663 | name = "unicode-xid" 664 | version = "0.2.1" 665 | source = "registry+https://github.com/rust-lang/crates.io-index" 666 | checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" 667 | 668 | [[package]] 669 | name = "unindent" 670 | version = "0.1.6" 671 | source = "registry+https://github.com/rust-lang/crates.io-index" 672 | checksum = "af41d708427f8fd0e915dcebb2cae0f0e6acb2a939b2d399c265c39a38a18942" 673 | 674 | [[package]] 675 | name = "utf8-ranges" 676 | version = "1.0.4" 677 | source = "registry+https://github.com/rust-lang/crates.io-index" 678 | checksum = "b4ae116fef2b7fea257ed6440d3cfcff7f190865f170cdad00bb6465bf18ecba" 679 | 680 | [[package]] 681 | name = "wasi" 682 | version = "0.9.0+wasi-snapshot-preview1" 683 | source = "registry+https://github.com/rust-lang/crates.io-index" 684 | checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" 685 | 686 | [[package]] 687 | name = "winapi" 688 | version = "0.3.9" 689 | source = "registry+https://github.com/rust-lang/crates.io-index" 690 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 691 | dependencies = [ 692 | "winapi-i686-pc-windows-gnu", 693 | "winapi-x86_64-pc-windows-gnu", 694 | ] 695 | 696 | [[package]] 697 | name = "winapi-i686-pc-windows-gnu" 698 | version = "0.4.0" 699 | source = "registry+https://github.com/rust-lang/crates.io-index" 700 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 701 | 702 | [[package]] 703 | name = "winapi-x86_64-pc-windows-gnu" 704 | version = "0.4.0" 705 | source = "registry+https://github.com/rust-lang/crates.io-index" 706 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 707 | 
-------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "lib/parser", 4 | "lib/tag", 5 | "lib/fractal", 6 | 7 | "bin/flurry", 8 | ] 9 | -------------------------------------------------------------------------------- /bin/flurry/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "flurry" 3 | version = "0.1.0" 4 | authors = ["the snowflake authors "] 5 | edition = "2018" 6 | license = "MPL-2.0" 7 | 8 | [dependencies] 9 | tag = { path = "../../lib/tag" } 10 | parser = { path = "../../lib/parser" } 11 | fractal = { path = "../../lib/fractal" } 12 | -------------------------------------------------------------------------------- /bin/flurry/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::fs::File; 3 | use std::io::{Read}; 4 | use std::process::exit; 5 | use std::borrow::Cow; 6 | use std::collections::HashMap; 7 | 8 | use fractal::{Evaluator, EvaluatorConfig, TypedExpression}; 9 | use parser::{lexer, snowflake::ProgramParser, ast::{Statement, Type, Expression}}; 10 | use tag::{TagName}; 11 | 12 | // Wrapper for unwrapping Results and printing errors cleanly 13 | macro_rules! 
unwrap { 14 | ( $x:expr ) => { 15 | match $x { 16 | Ok(o) => o, 17 | Err(e) => { 18 | eprintln!("Error!: {}", e); 19 | exit(1); 20 | } 21 | } 22 | }; 23 | ( debug $x:expr ) => { 24 | match $x { 25 | Ok(o) => o, 26 | Err(e) => { 27 | eprintln!("Error!: {:?}", e); 28 | exit(1); 29 | } 30 | } 31 | }; 32 | ( pretty $x:expr ) => { 33 | match $x { 34 | Ok(o) => o, 35 | Err(e) => { 36 | eprintln!("Error!: {:#?}", e); 37 | exit(1); 38 | } 39 | } 40 | }; 41 | } 42 | 43 | fn main() -> Result<(), Box> { 44 | let args: Vec = env::args().collect(); 45 | 46 | if args.len() < 3 { 47 | eprintln!("Usage: flurry [FILE] [CONFIG]"); 48 | exit(1); 49 | } 50 | 51 | let mut file = unwrap!(File::open(&args[1])); 52 | let mut contents = String::new(); 53 | file.read_to_string(&mut contents)?; 54 | 55 | let mut config_file = unwrap!(File::open(&args[2])); 56 | let mut config = String::new(); 57 | config_file.read_to_string(&mut config)?; 58 | 59 | let input = lexer::lex(&contents); 60 | let program = ProgramParser::new().parse(input).unwrap(); 61 | 62 | let split: Vec<&str> = config.split(":").collect(); 63 | let proj = split[0]; 64 | let mut file_tags: HashMap> = HashMap::new(); 65 | file_tags.insert(args[2].clone(), Vec::new()); // required for evaluator.prepare to work 66 | 67 | let conf = EvaluatorConfig { 68 | project_tag: TagName::Primary(Cow::from(proj)), 69 | file_tags, 70 | }; 71 | let mut evaluator = Evaluator::new(conf); 72 | 73 | let mut source: HashMap> = HashMap::new(); 74 | source.insert(args[2].clone(), program); 75 | evaluator.populate(&source)?; 76 | 77 | let main = evaluator.entries 78 | .iter_mut() 79 | .filter(|t| t.binding.index() == 0) 80 | .next() 81 | .unwrap() 82 | .clone(); 83 | 84 | evaluator.eval(&main, vec![TypedExpression(Type::Identifier(String::from("ilarge")), Expression::Integer(69.into()))])?; 85 | 86 | Ok(()) 87 | } 88 | -------------------------------------------------------------------------------- /examples/hello.sf: 
-------------------------------------------------------------------------------- 1 | main :: ilarge -> ilarge 2 | main :: tag nothing 3 | main a => 4 | println "Hello world!" 5 | 69 6 | 7 | -------------------------------------------------------------------------------- /examples/hello.sf.conf: -------------------------------------------------------------------------------- 1 | hello:hello 2 | -------------------------------------------------------------------------------- /lib/fractal/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "fractal" 3 | version = "0.1.0" 4 | authors = ["the snowflake authors "] 5 | edition = "2018" 6 | license = "MPL-2.0" 7 | 8 | [dependencies] 9 | serde = "^1" 10 | thiserror = "^1" 11 | num-bigint = "^0.3" 12 | parser = { path = "../parser" } 13 | tag = { path = "../tag" } 14 | -------------------------------------------------------------------------------- /lib/fractal/src/lib.rs: -------------------------------------------------------------------------------- 1 | use num_bigint::BigInt; 2 | use parser::ast::{Expression, OpSymbol, Statement, Tag, Type}; 3 | use std::{borrow::Cow, collections::HashMap}; 4 | use tag::{TagName, Universe, UniverseEntry, UniverseError}; 5 | use thiserror::Error; 6 | 7 | // this is a hack, remove it 8 | #[derive(Debug, PartialEq, Eq, Clone)] 9 | pub struct EvaluatorConfig<'a> { 10 | pub project_tag: TagName<'a>, 11 | pub file_tags: HashMap>>, 12 | } 13 | 14 | pub struct Evaluator<'a> { 15 | universe: Universe<'a, UniverseItem>, 16 | config: EvaluatorConfig<'a>, 17 | pub entries: Vec>, 18 | } 19 | 20 | #[derive(Debug, Eq, PartialEq, Clone)] 21 | pub struct TypedExpression(pub Type, pub Expression); 22 | 23 | #[derive(Debug, Eq, PartialEq, Clone)] 24 | pub enum UniverseItem { 25 | FnDecl { 26 | sig: Type, 27 | args: Vec, 28 | body: Vec>, 29 | }, 30 | 31 | // dummy variant used for implementing Default 32 | None, 33 | } 34 | 35 | impl Default 
for UniverseItem { 36 | fn default() -> Self { 37 | Self::None 38 | } 39 | } 40 | 41 | impl<'a> Evaluator<'a> { 42 | pub fn new(config: EvaluatorConfig<'a>) -> Self { 43 | Self { 44 | universe: Universe::default(), 45 | config, 46 | entries: Vec::new(), 47 | } 48 | } 49 | 50 | pub fn populate( 51 | &mut self, 52 | files: &HashMap>, 53 | ) -> Result<(), FractalError> { 54 | // a mapping from a primary tag -> binding name -> tags + type + the body 55 | // 56 | // the options are required to handle the non-existance of a binding in the map 57 | let mut binding_cache: HashMap< 58 | TagName<'a>, 59 | HashMap>>, Option, Option)>, 60 | > = HashMap::new(); 61 | 62 | // insert the known primary tag 63 | binding_cache.insert(self.config.project_tag.clone(), HashMap::new()); 64 | 65 | { 66 | // we currently ever handle one primary tag properly, just for my sanity <3 67 | // TODO(superwhiskers): make it handle multiple 68 | // TODO(superwhiskers): remove expect 69 | let cache = binding_cache 70 | .get_mut(&self.config.project_tag) 71 | .expect("unable to get a value that was just inserted into a map"); 72 | 73 | // loop over the file list, accumulating bindings inside of the hashmap. this is done twice to 74 | // ensure that everything is captured 75 | for (file_path, contents) in files { 76 | // now, iterate over the contents of the file, sifting the bindings 77 | for stmt in contents { 78 | // match against the statement, checking to see if it fits a set of accepted 79 | // bindings 80 | match stmt { 81 | Statement::TypeDecl { name, body } => { 82 | // since we have the name, we can now pull an entry out of the cache 83 | let cache_entry = if let Some(k) = cache.get_mut(name) { 84 | k 85 | } else { 86 | cache.insert(name.clone(), (None, None, None)); 87 | 88 | // TODO(superwhiskers): remove expect 89 | cache.get_mut(name).expect( 90 | "unable to get a value that was just inserted into a map", 91 | ) 92 | }; 93 | 94 | match body { 95 | Type::FnSig { .. 
} => cache_entry.1 = Some(body.clone()), 96 | Type::Tag(tag) => { 97 | // TODO(superwhiskers): remove expect 98 | let mut tags = self 99 | .config 100 | .file_tags 101 | .get(file_path) 102 | .expect( 103 | "unable to locate the current file in the file tag map", 104 | ) 105 | .clone(); 106 | 107 | tags.push(self.config.project_tag.clone()); 108 | 109 | // flatten the Tag into an array of TagNames 110 | flatten_tag_opcall_to_tagnames(&mut tags, tag); 111 | 112 | // shove it into the cache entry 113 | cache_entry.0 = Some(tags); 114 | } 115 | _ => panic!("unexpected Type kind in type/tag signature"), 116 | } 117 | } 118 | Statement::FnDecl { name, .. } => { 119 | let cache_entry = if let Some(k) = cache.get_mut(name) { 120 | k 121 | } else { 122 | cache.insert(name.clone(), (None, None, None)); 123 | 124 | // TODO(superwhiskers): remove expect 125 | cache.get_mut(name).expect( 126 | "unable to get a value that was just inserted into a map", 127 | ) 128 | }; 129 | 130 | cache_entry.2 = Some(stmt.clone()); 131 | } 132 | _ => panic!("unexpected Statement kind at top level"), 133 | } 134 | } 135 | } 136 | } 137 | 138 | // take the constructed HashMap and construct the Universe 139 | // 140 | // we don't care about taking ownership of the data, this HashMap isn't used past this 141 | // point 142 | for (_, primary_members) in binding_cache { 143 | for (binding_name, binding_value) in primary_members { 144 | // match over the Statement kind of it, as that's what the UniverseItem bases the 145 | // variant off of 146 | println!("binding: {:?}", binding_value); 147 | let (universe_item, tags) = match binding_value.2 { 148 | Some(Statement::FnDecl { args, body, .. 
}) => { 149 | if let Some(sig) = binding_value.1 { 150 | if let Some(tags) = binding_value.0 { 151 | (UniverseItem::FnDecl { sig, args, body }, tags) 152 | } else { 153 | panic!( 154 | "incomplete binding (missing tags) (this should never happen)" 155 | ); 156 | } 157 | } else { 158 | panic!("incomplete binding (missing type)"); 159 | } 160 | } 161 | // TODO(superwhiskers): actually handle this properly 162 | s => panic!("incomplete binding (missing valid statement, got {:?})", s), 163 | }; 164 | 165 | self.entries.push(self.universe.insert(|b| { 166 | b.set_name(Cow::Owned(binding_name)) 167 | .set_value(universe_item); 168 | for tag in tags { 169 | b.add_tag(tag); 170 | } 171 | b 172 | })?); 173 | } 174 | } 175 | 176 | Ok(()) 177 | } 178 | 179 | // evaluate an expression and return the resulting expression 180 | pub fn eval_expression( 181 | &mut self, 182 | local_bindings: &mut HashMap, 183 | expr: &Box, 184 | ) -> Result, FractalError> { 185 | Ok(Some(match expr.as_ref() { 186 | Expression::Integer(int) => { 187 | TypedExpression(Type::Identifier(String::from("ilarge")), Expression::Integer(int.clone())) 188 | } 189 | Expression::StringLiteral(string) => TypedExpression( 190 | Type::Identifier(String::from("string")), 191 | Expression::StringLiteral(string.clone()), 192 | ), 193 | // if an identifier is passed all the way down, it is retrieved from the local bindings 194 | // hashamp 195 | // 196 | // TODO(superwhiskers): remove expect 197 | Expression::Identifier(ident) => local_bindings 198 | .get(ident) 199 | .expect("unable to retrieve the binding from locals") 200 | .clone(), 201 | Expression::FnCall { 202 | name, 203 | args, 204 | } => { 205 | match name.as_str() { 206 | "println" => { 207 | let boxed_arg = Box::new(args.get(0) 208 | .expect("unable to get the first argument to println").clone()); 209 | println!("{}", if args.len() == 0 { 210 | String::from("") 211 | } else { 212 | // TODO(superwhiskers): remove expect 213 | if let TypedExpression( 214 | 
Type::Identifier(typen), 215 | Expression::StringLiteral(string), 216 | ) = self.eval_expression( 217 | local_bindings, 218 | &boxed_arg, 219 | )?.expect("unable to evaluate to get a string") { 220 | if typen != "string" { 221 | // TODO(superwhiskers): remove panic 222 | panic!("expression didn't return string to println"); 223 | } 224 | string 225 | } else { 226 | // TODO(superwhiskers): remove panic 227 | panic!("expression didn't return stringliteral to println"); 228 | } 229 | }); 230 | return Ok(None); 231 | } 232 | _ => panic!("unknown function: {}", name), 233 | } 234 | } 235 | _ => panic!("invalid expression: {:?}", expr), 236 | })) 237 | } 238 | 239 | // evaluate a UniverseItem::FnDecl and return the resulting expression 240 | pub fn eval_fn( 241 | &mut self, 242 | item: UniverseItem, 243 | args: Vec, 244 | ) -> Result, FractalError> { 245 | match item { 246 | UniverseItem::FnDecl { 247 | sig, 248 | args: arg_names, 249 | body, 250 | } => { 251 | // create a new binding set 252 | let mut bindings = HashMap::new(); 253 | 254 | // TODO(superwhiskers): populate local bindings w/ intersected ones from universe 255 | 256 | // populate it with the arguments 257 | for i in 0..arg_names.len() { 258 | bindings.insert( 259 | arg_names 260 | .get(i) 261 | .expect("unable to index a vector at an existing indice") 262 | .clone(), 263 | args 264 | .get(i) 265 | .expect("missing argument at indice").clone(), 266 | ); 267 | } 268 | 269 | let mut last = Ok(None); 270 | for expr in &body { 271 | last = self.eval_expression(&mut bindings, expr); 272 | } 273 | 274 | last 275 | } 276 | // TODO(superwhiskers): remove panic 277 | _ => panic!("not a function: {:?}", item), 278 | } 279 | } 280 | 281 | // evaluate a universe entry and return the resutling expression 282 | pub fn eval( 283 | &mut self, 284 | entry: &UniverseEntry<'a, UniverseItem>, 285 | args: Vec, 286 | ) -> Result, FractalError> { 287 | // TODO(superwhiskers): remove expect 288 | self.eval_fn( 289 | self.universe 
290 | .get(entry.binding) 291 | .expect("no binding found") 292 | .1 293 | .clone(), 294 | args, 295 | ) 296 | } 297 | } 298 | 299 | /// helper recursive function used to flatten a tag OpCall into an array of TagNames 300 | pub fn flatten_tag_opcall_to_tagnames<'a>(names: &mut Vec>, tag: &Tag) { 301 | match tag { 302 | Tag::OpCall { op, args } => { 303 | if *op != OpSymbol::Circumflex { 304 | // TODO(superwhiskers): remove panic 305 | panic!("operator is not `^`, it is {:?}", op); 306 | } else { 307 | for arg in args { 308 | flatten_tag_opcall_to_tagnames(names, arg); 309 | } 310 | } 311 | } 312 | Tag::PrimaryIdentifier(name) => names.push(TagName::Primary(Cow::Owned(name.clone()))), 313 | Tag::Identifier(name) => names.push(TagName::Secondary(Cow::Owned(name.clone()))), 314 | _ => panic!("unexpected Tag kind {:?}", tag), 315 | } 316 | } 317 | 318 | #[non_exhaustive] 319 | #[derive(Error, Debug)] 320 | pub enum FractalError { 321 | #[error("An error was encountered while using the tag library")] 322 | UniverseError(#[from] UniverseError), 323 | } 324 | -------------------------------------------------------------------------------- /lib/parser/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "parser" 3 | version = "0.1.0" 4 | authors = ["the snowflake authors "] 5 | edition = "2018" 6 | build = "build.rs" 7 | license = "MPL-2.0" 8 | 9 | [build-dependencies] 10 | lalrpop = "0.19" 11 | 12 | [dependencies] 13 | lalrpop-util = "0.19" 14 | regex = "1.3.9" 15 | logos = "0.11.4" 16 | num-bigint = "0.3.0" 17 | 18 | [dev-dependencies] 19 | indoc = "1.0" 20 | -------------------------------------------------------------------------------- /lib/parser/build.rs: -------------------------------------------------------------------------------- 1 | // 2 | // parser - snowflake's parser 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla 
public 6 | // license, v. 2.0. if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 8 | // 9 | 10 | extern crate lalrpop; 11 | 12 | fn main() { 13 | lalrpop::process_root().unwrap(); 14 | } 15 | -------------------------------------------------------------------------------- /lib/parser/readme.md: -------------------------------------------------------------------------------- 1 | # parser 2 | 3 | This directory has code for lexing, parsing, and ast. 4 | 5 | Currently the parsing process is: 6 | 7 | - lex str using logos 8 | - parse lexed tokens into ast using lalrpop 9 | - todo: ast may need to be converted into a better form ast 10 | -------------------------------------------------------------------------------- /lib/parser/src/ast.rs: -------------------------------------------------------------------------------- 1 | // 2 | // parser - snowflake's parser 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla public 6 | // license, v. 2.0. if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 
8 | // 9 | 10 | use num_bigint::BigInt; 11 | 12 | // "top level" statements that are not an expression 13 | // while anything can be a statement, I think a goal should be that 14 | // anything can return a value 15 | // - @bree 16 | #[derive(Debug, PartialEq, Clone)] 17 | pub enum Statement { 18 | FnDecl { 19 | name: String, 20 | args: Vec, 21 | body: Vec>, 22 | }, 23 | TypeDecl { 24 | name: String, 25 | body: Type, 26 | }, 27 | // ValueDecl { 28 | // pat: Pattern, 29 | // expr: Expression, 30 | // } 31 | 32 | // dummy variant used for implementing Default 33 | None, 34 | } 35 | 36 | impl Default for Statement { 37 | fn default() -> Self { 38 | Self::None 39 | } 40 | } 41 | 42 | #[derive(Debug, Eq, PartialEq, Clone)] 43 | pub enum Type { 44 | FnSig { 45 | args: Vec>, 46 | ret: Box, 47 | }, 48 | Tag(Tag), 49 | Nat(BigInt), 50 | Identifier(String), 51 | 52 | // dummy variant used for implementing Default 53 | None, 54 | } 55 | 56 | impl Default for Type { 57 | fn default() -> Self { 58 | Self::None 59 | } 60 | } 61 | 62 | #[derive(Debug, Eq, PartialEq, Clone)] 63 | pub enum Expression { 64 | OpCall { 65 | op: OpSymbol, 66 | args: Vec>, 67 | }, 68 | FnCall { 69 | name: String, 70 | args: Vec, 71 | }, 72 | Match { 73 | expr: Box, 74 | args: Vec, 75 | }, 76 | Destructure { 77 | pat: Pattern, 78 | body: Vec>, 79 | }, 80 | ValueDecl { 81 | // value assignments 82 | assigns: Vec>, 83 | body: Option>>, 84 | }, 85 | ValueAssign { 86 | pat: Pattern, 87 | expr: Box, 88 | }, 89 | TagAssign { 90 | tag: Tag, 91 | expr: Tag 92 | }, 93 | TypeDecl { 94 | ty: Type, 95 | expr: Box, 96 | }, 97 | Integer(BigInt), 98 | Identifier(String), 99 | StringLiteral(String), 100 | List(Vec>), 101 | } 102 | 103 | #[derive(Debug, Eq, PartialEq, Clone)] 104 | pub enum Pattern { 105 | Wildcard, 106 | Range { 107 | start: Option>, 108 | end: Option>, 109 | }, 110 | Integer(BigInt), 111 | Identifier(String), 112 | StringLiteral(String), 113 | } 114 | 115 | #[derive(Debug, Eq, PartialEq, Clone)] 116 | 
pub enum Tag { 117 | OpCall { op: OpSymbol, args: Vec> }, 118 | Assign { pats: Vec> }, 119 | PrimaryIdentifier(String), 120 | Identifier(String), 121 | } 122 | 123 | // named OpSymbol so it has some "genericness" for future use 124 | // in something like macros 125 | // - @bree 126 | #[derive(Debug, Eq, PartialEq, Clone)] 127 | pub enum OpSymbol { 128 | Plus, 129 | Minus, 130 | Star, 131 | ForwardSlash, 132 | LAngleBracket, 133 | RAngleBracket, 134 | Circumflex, 135 | } 136 | -------------------------------------------------------------------------------- /lib/parser/src/indentation.rs: -------------------------------------------------------------------------------- 1 | // 2 | // parser - snowflake's parser 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla public 6 | // license, v. 2.0. if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 8 | // 9 | 10 | #[derive(Debug, Clone, PartialEq)] 11 | pub enum Indentation { 12 | Indent, 13 | Dedent(usize), 14 | Ondent, 15 | } 16 | 17 | #[derive(Debug, Clone, PartialEq, Default)] 18 | pub struct IndentationLevel { 19 | pub stack: Vec, 20 | } 21 | 22 | impl IndentationLevel { 23 | pub fn new() -> IndentationLevel { 24 | IndentationLevel { stack: vec![] } 25 | } 26 | 27 | pub fn level(&self) -> usize { 28 | *self.stack.last().unwrap_or(&0) 29 | } 30 | 31 | pub fn update(&mut self, level: usize) -> Result { 32 | if level > self.level() { 33 | self.stack.push(level); 34 | Ok(Indentation::Indent) 35 | } else if level < self.level() { 36 | if level == 0 || self.stack.iter().find(|&&x| x == level).is_some() { 37 | let stack_level = self.stack.len(); 38 | while self.level() > level { 39 | self.stack.pop(); 40 | } 41 | Ok(Indentation::Dedent(stack_level - self.stack.len())) 42 | } else { 43 | Err("indentation level while dedenting does not match any previously indented level.") 44 | } 45 | } else { 
46 | Ok(Indentation::Ondent) 47 | } 48 | } 49 | } 50 | 51 | #[cfg(test)] 52 | mod test { 53 | use super::*; 54 | 55 | #[test] 56 | fn test_indentation() { 57 | let mut indentation = IndentationLevel::new(); 58 | assert_eq!(indentation.update(1), Ok(Indentation::Indent)); 59 | assert_eq!(indentation.level(), 1); 60 | assert_eq!(indentation.update(0), Ok(Indentation::Dedent(1))); 61 | assert_eq!(indentation.level(), 0); 62 | assert_eq!(indentation.update(1), Ok(Indentation::Indent)); 63 | assert_eq!(indentation.level(), 1); 64 | assert_eq!(indentation.update(3), Ok(Indentation::Indent)); 65 | assert_eq!(indentation.level(), 3); 66 | assert_eq!( 67 | indentation.update(2), 68 | Err("indentation level while dedenting does not match any previously indented level.") 69 | ); 70 | assert_eq!(indentation.level(), 3); 71 | assert_eq!(indentation.update(0), Ok(Indentation::Dedent(2))); 72 | assert_eq!(indentation.level(), 0); 73 | } 74 | 75 | #[test] 76 | fn test_indentation_stack_size() { 77 | let mut indentation = IndentationLevel::new(); 78 | assert_eq!(indentation.update(1), Ok(Indentation::Indent)); 79 | assert_eq!(indentation.level(), 1); 80 | assert_eq!(indentation.update(2), Ok(Indentation::Indent)); 81 | assert_eq!(indentation.level(), 2); 82 | assert_eq!(indentation.update(3), Ok(Indentation::Indent)); 83 | assert_eq!(indentation.level(), 3); 84 | assert_eq!(indentation.stack.len(), 3); 85 | assert_eq!(indentation.update(2), Ok(Indentation::Dedent(1))); 86 | assert_eq!(indentation.stack.len(), 2) 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /lib/parser/src/lexer.rs: -------------------------------------------------------------------------------- 1 | use crate::indentation; 2 | use crate::indentation::Indentation; 3 | use crate::token::Token; 4 | use logos::Logos; 5 | 6 | pub type Spanned = Result<(Loc, Tok, Loc), Error>; 7 | pub type Item = Spanned; 8 | 9 | fn spanned_token_into_item(span: (Token, logos::Span)) 
-> Item { 10 | let range = span.1; 11 | let token = span.0; 12 | Ok((range.start, token, range.end)) 13 | } 14 | 15 | // todo: possibly change the way this works to be part of the lexing process itself 16 | // todo: clean up dedent insertion code to not use Arc, and to use proper spans. 17 | pub fn lex<'a>(source: &'a str) -> impl Iterator + 'a { 18 | let mut indentation = indentation::IndentationLevel::new(); 19 | let lexer = Token::lexer(source); 20 | 21 | lexer 22 | .spanned() 23 | // todo: find a possible way to do with without a box 24 | // convert Indentation tokens to usable Indent, Dedent, and Newline tokens. 25 | .flat_map(move |(tok, range)| match tok { 26 | Token::Indentation(level) => match indentation.update(level) { 27 | Ok(indent) => match indent { 28 | Indentation::Indent => { 29 | vec![(Token::Newline, range.clone()), (Token::Indent, range)] 30 | } 31 | Indentation::Dedent(count) => { 32 | let mut tokens = vec![ 33 | (Token::Newline, range.clone()), 34 | (Token::Dedent, range.clone()), 35 | ]; 36 | if count > 1 { 37 | for _ in 0..count { 38 | tokens.push((Token::Dedent, range.clone())); 39 | } 40 | } 41 | tokens 42 | } 43 | Indentation::Ondent => vec![(Token::Newline, range)], 44 | }, 45 | Err(err) => vec![(Token::Error(String::from(err)), range)], 46 | }, 47 | _ => vec![(tok, range)], 48 | }) 49 | .map(spanned_token_into_item) 50 | } 51 | 52 | #[cfg(test)] 53 | mod test { 54 | use super::*; 55 | use indoc::indoc; 56 | 57 | #[test] 58 | fn test_lex() { 59 | let input = indoc! 
{" 60 | a 61 | b 62 | c 63 | d 64 | e 65 | f 66 | g 67 | _ 68 | i 69 | "}; 70 | 71 | let lexed: Vec = lex(input).map(|t| t.unwrap().1).collect(); 72 | assert_eq!( 73 | lexed, 74 | vec![ 75 | Token::Identifier(String::from("a")), 76 | Token::Newline, 77 | Token::Indent, 78 | Token::Identifier(String::from("b")), 79 | Token::Newline, 80 | Token::Indent, 81 | Token::Identifier(String::from("c")), 82 | Token::Newline, 83 | Token::Dedent, 84 | Token::Identifier(String::from("d")), 85 | Token::Newline, 86 | Token::Indent, 87 | Token::Identifier(String::from("e")), 88 | Token::Newline, 89 | Token::Dedent, 90 | Token::Dedent, 91 | Token::Dedent, 92 | Token::Identifier(String::from("f")), 93 | Token::Newline, 94 | Token::Identifier(String::from("g")), 95 | Token::Newline, 96 | Token::Indent, 97 | Token::Symbol('_'), 98 | Token::Newline, 99 | Token::Dedent, 100 | Token::Identifier(String::from("i")), 101 | Token::Newline, 102 | ] 103 | ) 104 | } 105 | 106 | #[test] 107 | fn test_lex_dedent_insertion() { 108 | let input = indoc! {" 109 | a 110 | b 111 | c 112 | "}; 113 | 114 | let lexed: Vec = lex(input).map(|t| t.unwrap().1).collect(); 115 | assert_eq!( 116 | lexed, 117 | vec![ 118 | Token::Identifier(String::from("a")), 119 | Token::Newline, 120 | Token::Indent, 121 | Token::Identifier(String::from("b")), 122 | Token::Newline, 123 | Token::Indent, 124 | Token::Identifier(String::from("c")), 125 | Token::Newline, 126 | Token::Dedent, 127 | Token::Dedent, 128 | Token::Dedent, 129 | ] 130 | ) 131 | } 132 | 133 | #[test] 134 | fn test_lex_newlines() { 135 | let input = indoc! 
{" 136 | abc 137 | a 138 | b 139 | c 140 | "}; 141 | 142 | let lexed: Vec = lex(input).map(|t| t.unwrap().1).collect(); 143 | assert_eq!( 144 | lexed, 145 | vec![ 146 | Token::Identifier(String::from("abc")), 147 | Token::Newline, 148 | Token::Indent, 149 | Token::Identifier(String::from("a")), 150 | Token::Newline, 151 | Token::Identifier(String::from("b")), 152 | Token::Newline, 153 | Token::Identifier(String::from("c")), 154 | Token::Newline, 155 | Token::Dedent, 156 | ] 157 | ) 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /lib/parser/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // parser - snowflake's parser 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla public 6 | // license, v. 2.0. if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 
8 | // 9 | 10 | #[macro_use] 11 | extern crate lalrpop_util; 12 | pub mod ast; 13 | pub mod indentation; 14 | pub mod lexer; 15 | pub mod token; 16 | 17 | lalrpop_mod!(pub snowflake); 18 | 19 | // pub fn parse<'a>( 20 | // input: &'a str, 21 | // ) -> Result> { 22 | // let input = token::Token::lexer(input) 23 | // .spanned() 24 | // .map(spanned_token_into_item); 25 | // let mut indentation = indentation::IndentationLevel::new(); 26 | // // todo: make into ProgramParser 27 | // snowflake::StatementParser::new().parse(&mut indentation, input) 28 | // } 29 | 30 | #[cfg(test)] 31 | mod test { 32 | use super::*; 33 | use ast; 34 | use ast::Expression; 35 | use ast::OpSymbol; 36 | use ast::Pattern; 37 | use ast::Statement; 38 | use ast::Tag; 39 | use ast::Type; 40 | use indoc::indoc; 41 | use num_bigint::BigInt; 42 | use snowflake::*; 43 | 44 | impl From for ast::Expression { 45 | fn from(i: isize) -> Self { 46 | ast::Expression::Integer(BigInt::from(i)) 47 | } 48 | } 49 | 50 | impl<'a> From<&'a str> for ast::Expression { 51 | fn from(s: &'a str) -> Self { 52 | ast::Expression::Identifier(String::from(s)) 53 | } 54 | } 55 | 56 | impl From for ast::Type { 57 | fn from(i: isize) -> Self { 58 | ast::Type::Nat(BigInt::from(i)) 59 | } 60 | } 61 | 62 | impl<'a> From<&'a str> for ast::Type { 63 | fn from(s: &'a str) -> Self { 64 | ast::Type::Identifier(String::from(s)) 65 | } 66 | } 67 | 68 | impl From for ast::Pattern { 69 | fn from(i: isize) -> Self { 70 | ast::Pattern::Integer(i.into()) 71 | } 72 | } 73 | 74 | impl<'a> From<&'a str> for ast::Pattern { 75 | fn from(s: &'a str) -> Self { 76 | ast::Pattern::Identifier(s.into()) 77 | } 78 | } 79 | 80 | impl<'a> From<&'a str> for ast::Tag { 81 | fn from(s: &'a str) -> Self { 82 | ast::Tag::Identifier(s.into()) 83 | } 84 | } 85 | 86 | // test parse for 87 | macro_rules! 
test_parse { 88 | ($path:ty where $($input:expr => $test:expr),*) => { 89 | $({ 90 | let input = lexer::lex($input); 91 | let program = <$path>::new().parse(input).unwrap(); 92 | assert_eq!(program, $test) 93 | })* 94 | }; 95 | } 96 | 97 | fn ops( 98 | l: impl Into, 99 | op: ast::OpSymbol, 100 | r: impl Into, 101 | ) -> ast::Expression { 102 | ast::Expression::OpCall { 103 | op: op, 104 | args: vec![Box::new(l.into()), Box::new(r.into())], 105 | } 106 | } 107 | 108 | #[test] 109 | fn parse_identifier() { 110 | test_parse! { 111 | IdentifierParser where 112 | "name" => 113 | String::from("name"), 114 | "name_with_underscores_numbers_and_is_long_1234" => 115 | String::from("name_with_underscores_numbers_and_is_long_1234") 116 | } 117 | } 118 | 119 | #[test] 120 | fn parse_integer() { 121 | test_parse! { 122 | IntegerParser where 123 | "132" => BigInt::from(132), 124 | "123_456_789" => BigInt::from(123_456_789) 125 | } 126 | } 127 | 128 | #[test] 129 | fn parse_literal() { 130 | test_parse! { 131 | LiteralParser where 132 | "132" => Expression::Integer( 133 | BigInt::from(132) 134 | ), 135 | "123_456_789" => Expression::Integer( 136 | BigInt::from(123_456_789) 137 | ), 138 | "name" => Expression::Identifier( 139 | String::from("name") 140 | ), 141 | "name_with_underscores_numbers_and_is_long_1234" => Expression::Identifier( 142 | String::from("name_with_underscores_numbers_and_is_long_1234") 143 | ) 144 | } 145 | } 146 | 147 | #[test] 148 | fn parse_op_call() { 149 | test_parse! 
{ 150 | OpCallParser where 151 | "1 + 1" => ast::Expression::OpCall { 152 | op: ast::OpSymbol::Plus, 153 | args: vec![ 154 | Box::new(1.into()), 155 | Box::new(1.into()), 156 | ] 157 | }, 158 | // should parse as 159 | // (1 + (2 * (3 - (4 / 5)))) 160 | // although alternatively it could be made to parse like 161 | // ((((1 + 2) * 3) - 4) / 5) 162 | "1 + 2 * 3 - 4 / 5" => { 163 | use ast::OpSymbol::*; 164 | ops(1, Plus, ops(2, Star, ops(3, Minus, ops(4, ForwardSlash, 5)))) 165 | } 166 | } 167 | } 168 | 169 | #[test] 170 | fn parse_block() { 171 | test_parse! { 172 | BlockParser where 173 | "\n 123\n abc\n 123\n " => vec![ 174 | Box::new(123.into()), 175 | Box::new("abc".into()), 176 | Box::new(123.into()), 177 | ] 178 | } 179 | } 180 | 181 | #[test] 182 | fn parse_fn_decl() { 183 | test_parse! { 184 | FnDeclParser where 185 | "add a b => a + b\n" => Statement::FnDecl { 186 | name: String::from("add"), 187 | args: vec![ 188 | String::from("a"), 189 | String::from("b") 190 | ], 191 | body: vec![ 192 | Box::new( 193 | Expression::OpCall { 194 | op: ast::OpSymbol::Plus, 195 | args: vec![ 196 | Box::new(Expression::from("a")), 197 | Box::new(ast::Expression::from("b")) 198 | ] 199 | } 200 | ) 201 | ] 202 | }, 203 | "add a b =>\n a + b\n" => Statement::FnDecl { 204 | name: "add".into(), 205 | args: vec!["a".into(), "b".into()], 206 | body: vec![ 207 | Box::new( 208 | Expression::OpCall { 209 | op: ast::OpSymbol::Plus, 210 | args: vec![ 211 | Box::new("a".into()), 212 | Box::new("b".into()) 213 | ] 214 | } 215 | ) 216 | ] 217 | }, 218 | "exp a b => (a * a) + (b * b)\n" => Statement::FnDecl { 219 | name: "exp".into(), 220 | args: vec!["a".into(), "b".into()], 221 | body: vec![ 222 | Box::new(ops( 223 | ops("a", OpSymbol::Star, "a"), 224 | OpSymbol::Plus, 225 | ops("b", OpSymbol::Star, "b") 226 | )) 227 | ] 228 | }, 229 | "exp a b =>\n (a * a) + (b * b)\n" => Statement::FnDecl { 230 | name: "exp".into(), 231 | args: vec!["a".into(), "b".into()], 232 | body: vec![ 233 | 
Box::new(ops( 234 | ops("a", OpSymbol::Star, "a"), 235 | OpSymbol::Plus, 236 | ops("b", OpSymbol::Star, "b") 237 | )) 238 | ] 239 | } 240 | } 241 | } 242 | 243 | #[test] 244 | fn parse_fn_call() { 245 | test_parse! { 246 | FnCallParser where 247 | "add 1 2" => Expression::FnCall { 248 | name: "add".into(), 249 | args: vec![ 250 | 1.into(), 251 | 2.into(), 252 | ] 253 | }, 254 | "add a b" => Expression::FnCall { 255 | name: "add".into(), 256 | args: vec![ 257 | "a".into(), 258 | "b".into(), 259 | ] 260 | }, 261 | "add a b c" => Expression::FnCall { 262 | name: "add".into(), 263 | args: vec![ 264 | "a".into(), 265 | "b".into(), 266 | "c".into(), 267 | ] 268 | } 269 | } 270 | } 271 | 272 | #[test] 273 | fn parse_expression() { 274 | test_parse! { 275 | ExpressionParser where 276 | "1 + 2" => ops(1, OpSymbol::Plus, 2), 277 | "1 + (2 * 3)" => ops(1, OpSymbol::Plus, ops(2, OpSymbol::Star, 3)), 278 | "(1 + 2) * 3" => ops(ops(1, OpSymbol::Plus, 2), OpSymbol::Star, 3) 279 | } 280 | } 281 | 282 | #[test] 283 | fn parse_statement() { 284 | test_parse! { 285 | StatementParser where 286 | "add :: int int -> int" => Statement::TypeDecl { 287 | name: "add".into(), 288 | body: Type::FnSig { 289 | args: vec![ 290 | Box::new("int".into()), 291 | Box::new("int".into()) 292 | ], 293 | ret: Box::new("int".into()) 294 | }, 295 | }, 296 | "add a b => a + b\n" => Statement::FnDecl { 297 | name: String::from("add"), 298 | args: vec![ 299 | String::from("a"), 300 | String::from("b") 301 | ], 302 | body: vec![ 303 | Box::new(Expression::OpCall { 304 | op: ast::OpSymbol::Plus, 305 | args: vec![ 306 | Box::new(Expression::from("a")), 307 | Box::new(ast::Expression::from("b")) 308 | ] 309 | }) 310 | ] 311 | } 312 | } 313 | } 314 | 315 | // todo: needs actual tests 316 | #[test] 317 | fn should_parse_question() { 318 | // mix/copy from rust and haskell one lol 319 | // todo: 320 | let bad_example = indoc! 
{" 321 | question :: tag *examples ^ question ^ example 322 | question :: string string -> string 323 | question prompt valid => 324 | println prompt 325 | match len valid > 0 => 326 | true => print \"(\" join valid \",\" \")\" 327 | 328 | print \": \" 329 | flush stdout 330 | let input = read_line stdin 331 | 332 | match contains line valid => 333 | true => return input 334 | 335 | println input \" is not a valid answer!\" 336 | question prompt valid 337 | 338 | main => 339 | question \"foo\" [\"bar\", \"baz\"] 340 | 341 | "}; 342 | 343 | let input = lexer::lex(bad_example); 344 | let _program = ProgramParser::new().parse(input).unwrap(); 345 | // assert_eq!(program.is_err(), false) 346 | } 347 | 348 | #[test] 349 | fn should_parse_cat_dog() { 350 | let bad_example = indoc! {" 351 | let #{ cat_function dog_function } = tag *cat^dog in 352 | cat_function dog_function 353 | "}; 354 | let input = lexer::lex(bad_example); 355 | let _program = ExpressionParser::new().parse(input).unwrap(); 356 | // assert_eq!(program.is_err(), false) 357 | } 358 | 359 | #[test] 360 | fn assignment_test() { 361 | // todo: remove \n requirement after certain expr/statement 362 | let assign_input = indoc! {" 363 | add a => 364 | let b = 0 in 365 | a + b 366 | 367 | 368 | "}; 369 | 370 | test_parse! { 371 | ProgramParser where 372 | assign_input => vec![ 373 | Statement::FnDecl { 374 | name: "add".into(), 375 | args: vec!["a".into()], 376 | body: vec![ 377 | Box::new(Expression::ValueDecl { 378 | assigns: vec![Box::new(Expression::ValueAssign { 379 | pat: "b".into(), 380 | expr: Box::new(0.into()) 381 | })], 382 | body: Some(vec![Box::new( 383 | ops("a", OpSymbol::Plus, "b") 384 | )]) 385 | }) 386 | ] 387 | } 388 | ] 389 | } 390 | } 391 | #[test] 392 | fn parse_program() { 393 | let type_decl_input = indoc! {" 394 | fib :: isize -> isize 395 | "}; 396 | 397 | // todo: add end of input newline/dedent insertions for situations like this. 
398 | // todo: add a way to match n 399 | let fn_decl_input = indoc! {" 400 | fib n => 401 | (fib n - 1) + (fib n - 2) 402 | 403 | "}; 404 | 405 | let full_input = indoc! {" 406 | fib :: isize -> isize 407 | fib n => 408 | (fib n - 1) + (fib n - 2) 409 | 410 | "}; 411 | 412 | test_parse! { 413 | ProgramParser where 414 | "" => vec![], 415 | type_decl_input => vec![ 416 | Statement::TypeDecl { 417 | name: "fib".into(), 418 | body: Type::FnSig { 419 | args: vec![ 420 | Box::new("isize".into()) 421 | ], 422 | ret: Box::new("isize".into()) 423 | } 424 | } 425 | ], 426 | fn_decl_input => vec![ 427 | Statement::FnDecl { 428 | name: "fib".into(), 429 | args: vec!["n".into()], 430 | body: vec![ 431 | Box::new(ops( 432 | Expression::FnCall { 433 | name: "fib".into(), 434 | args: vec![ 435 | ops("n", OpSymbol::Minus, 1) 436 | ] 437 | }, 438 | OpSymbol::Plus, 439 | Expression::FnCall { 440 | name: "fib".into(), 441 | args: vec![ 442 | ops("n", OpSymbol::Minus, 2) 443 | ] 444 | }, 445 | )) 446 | ] 447 | } 448 | ], 449 | full_input => vec![ 450 | Statement::TypeDecl { 451 | name: "fib".into(), 452 | body: Type::FnSig { 453 | args: vec![ 454 | Box::new("isize".into()) 455 | ], 456 | ret: Box::new("isize".into()) 457 | } 458 | }, 459 | Statement::FnDecl { 460 | name: "fib".into(), 461 | args: vec!["n".into()], 462 | body: vec![ 463 | Box::new(ops( 464 | Expression::FnCall { 465 | name: "fib".into(), 466 | args: vec![ 467 | ops("n", OpSymbol::Minus, 1) 468 | ] 469 | }, 470 | OpSymbol::Plus, 471 | Expression::FnCall { 472 | name: "fib".into(), 473 | args: vec![ 474 | ops("n", OpSymbol::Minus, 2) 475 | ] 476 | }, 477 | )) 478 | ] 479 | } 480 | ] 481 | } 482 | } 483 | 484 | // TypeExpression tests. 485 | 486 | #[test] 487 | fn parse_fn_sig() { 488 | test_parse! 
{ 489 | FnSigParser where 490 | "int int -> int" => Type::FnSig { 491 | args: vec![ 492 | Box::new("int".into()), 493 | Box::new("int".into()) 494 | ], 495 | ret: Box::new("int".into()) 496 | }, 497 | "int int -> int -> int" => Type::FnSig { 498 | args: vec![ 499 | Box::new("int".into()), 500 | Box::new("int".into()) 501 | ], 502 | ret: Box::new(Type::FnSig { 503 | args: vec![ 504 | Box::new("int".into()) 505 | ], 506 | ret: Box::new("int".into()) 507 | }) 508 | } 509 | } 510 | } 511 | 512 | #[test] 513 | fn parse_type_decl() { 514 | test_parse! { 515 | TypeDeclParser where 516 | "fib :: int int -> int" => Statement::TypeDecl { 517 | name: "fib".into(), 518 | body: Type::FnSig { 519 | args: vec![ 520 | Box::new("int".into()), 521 | Box::new("int".into()) 522 | ], 523 | ret: Box::new("int".into()) 524 | }, 525 | } 526 | } 527 | } 528 | 529 | #[test] 530 | fn parse_pattern() { 531 | test_parse! { 532 | MatchPartParser where 533 | "name => 1 + 1\n" => Expression::Destructure { 534 | pat: "name".into(), 535 | body: vec![ 536 | Box::new( 537 | ops(1, OpSymbol::Plus, 1) 538 | ) 539 | ] 540 | }, 541 | "_ => 1 + 1\n" => Expression::Destructure { 542 | pat: Pattern::Wildcard, 543 | body: vec![ 544 | Box::new( 545 | ops(1, OpSymbol::Plus, 1) 546 | ) 547 | ] 548 | }, 549 | "0..2 => 1 + 1\n" => Expression::Destructure { 550 | pat: Pattern::Range { 551 | start: Some(Box::new(0.into())), 552 | end: Some(Box::new(2.into())) 553 | }, 554 | body: vec![ 555 | Box::new( 556 | ops(1, OpSymbol::Plus, 1) 557 | ) 558 | ] 559 | } 560 | } 561 | } 562 | 563 | #[test] 564 | fn parse_match() { 565 | let expr = indoc! {" 566 | match n => 567 | 0 => n 568 | 1 => n 569 | _ => fib n 570 | "}; 571 | 572 | test_parse! 
{ 573 | MatchParser where 574 | expr => Expression::Match { 575 | expr: Box::new("n".into()), 576 | args: vec![ 577 | Expression::Destructure { 578 | pat: 0.into(), 579 | body: vec![ 580 | Box::new("n".into()), 581 | ] 582 | }, 583 | Expression::Destructure { 584 | pat: 1.into(), 585 | body: vec![ 586 | Box::new("n".into()), 587 | ] 588 | }, 589 | Expression::Destructure { 590 | pat: Pattern::Wildcard, 591 | body: vec![ 592 | Box::new( 593 | Expression::FnCall { 594 | name: "fib".into(), 595 | args: vec!["n".into()] 596 | } 597 | ), 598 | ] 599 | }, 600 | ] 601 | } 602 | } 603 | } 604 | 605 | #[test] 606 | fn expr_type_decl() { 607 | test_parse! { 608 | ExpressionParser where 609 | "(1 + 1) :: Int" => Expression::TypeDecl { 610 | ty: "Int".into(), 611 | expr: Box::new(ops(1, OpSymbol::Plus, 1)) 612 | }, 613 | "(add (1) :: Int (2) :: Int) :: Int" => Expression::TypeDecl { 614 | ty: "Int".into(), 615 | expr: Box::new(Expression::FnCall { 616 | name: "add".into(), 617 | args: vec![ 618 | Expression::TypeDecl { 619 | ty: "Int".into(), 620 | expr: Box::new(1.into()) 621 | }, 622 | Expression::TypeDecl { 623 | ty: "Int".into(), 624 | expr: Box::new(2.into()) 625 | } 626 | ] 627 | }) 628 | } 629 | } 630 | } 631 | 632 | #[test] 633 | fn tag_decl() { 634 | test_parse! 
{ 635 | TagDeclParser where 636 | "tag a^b" => Tag::OpCall { 637 | op: OpSymbol::Circumflex, 638 | args: vec![ 639 | Box::new("a".into()), 640 | Box::new("b".into()) 641 | ] 642 | }, 643 | "tag a^(*b)" => Tag::OpCall { 644 | op: OpSymbol::Circumflex, 645 | args: vec![ 646 | Box::new("a".into()), 647 | Box::new(Tag::PrimaryIdentifier("b".into())) 648 | ] 649 | } 650 | } 651 | } 652 | } 653 | -------------------------------------------------------------------------------- /lib/parser/src/snowflake.lalrpop: -------------------------------------------------------------------------------- 1 | // 2 | // parser - snowflake's parser 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla public 6 | // license, v. 2.0. if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 8 | // 9 | 10 | // note(@bree): no decendents of ast should be "use"d 11 | // to preserve a visual separation between parsing and ast 12 | use crate::ast; 13 | use crate::token::Token; 14 | use num_bigint::BigInt; 15 | 16 | grammar; 17 | 18 | pub Program: Vec = { 19 | ProgramLine* => <>.into_iter().flatten().collect() 20 | } 21 | 22 | pub ProgramLine: Vec = { 23 | "\n" => vec![s], 24 | "\n" => vec![] 25 | } 26 | 27 | // Begin Types 28 | pub TypeStatement: ast::Statement = { 29 | TypeDecl 30 | } 31 | 32 | pub TypeDecl: ast::Statement = { 33 | "::" => ast::Statement::TypeDecl { 34 | name: name, 35 | body: expr 36 | } 37 | } 38 | 39 | pub TypeExpression: ast::Type = { 40 | SubTypeExpression, 41 | FnSig, 42 | TagDecl => ast::Type::Tag(<>), 43 | } 44 | 45 | pub SubTypeExpression: ast::Type = { 46 | "(" ")" => e, 47 | TypeLiteral 48 | } 49 | 50 | pub FnSig: ast::Type = { 51 | "->" => { 52 | let mut out_args = vec![]; 53 | for arg in args { 54 | out_args.push(Box::new(arg)) 55 | }; 56 | ast::Type::FnSig { 57 | args: out_args, 58 | ret: Box::new(ret) 59 | } 60 | } 61 | } 62 | 63 | pub 
TypeLiteral: ast::Type = { 64 | Integer => ast::Type::Nat(<>), 65 | Identifier => ast::Type::Identifier(<>), 66 | } 67 | // End Types 68 | 69 | pub Statement: ast::Statement = { 70 | TypeStatement, 71 | FnDecl, 72 | // ValueDeclStatement, 73 | // Expression => ast::Statement::Expression(<>) 74 | } 75 | 76 | pub FnDecl: ast::Statement = { 77 | "=>" => ast::Statement::FnDecl { 78 | name: name, 79 | args: args, 80 | body: body, 81 | } 82 | } 83 | 84 | // TODO(superwhiskers): potentially fix this 85 | // pub ValueDeclStatement: ast::Statement = { 86 | // ValueDecl => ast::Statement::Expression(<>) 87 | // } 88 | 89 | pub Match: ast::Expression = { 90 | "match" "=>" => ast::Expression::Match { 91 | expr: Box::new(expr), 92 | args: parts, 93 | } 94 | } 95 | 96 | // todo: macro/generic 97 | pub MatchBlock: Vec = { 98 | "\n" Indent Dedent => patterns, 99 | } 100 | 101 | pub MatchPart: ast::Expression = { 102 | "=>" => ast::Expression::Destructure { 103 | pat: pat, 104 | body: body, 105 | } 106 | } 107 | 108 | pub Pattern: ast::Pattern = { 109 | RangePattern, 110 | LiteralPattern, 111 | WildcardPattern, 112 | } 113 | 114 | pub LiteralPattern: ast::Pattern = { 115 | Integer => ast::Pattern::Integer(<>), 116 | Identifier => ast::Pattern::Identifier(<>), 117 | "String" => ast::Pattern::StringLiteral(<>), 118 | } 119 | 120 | pub RangePattern: ast::Pattern = { 121 | ".." => ast::Pattern::Range { 122 | start: Some(Box::new(start)), 123 | end: Some(Box::new(end)), 124 | } 125 | } 126 | 127 | pub WildcardPattern: ast::Pattern = { 128 | "_" => ast::Pattern::Wildcard, 129 | } 130 | 131 | pub ValueDecl: ast::Expression = { 132 | // todo: LetIn needs to be transformed into something more usable 133 | "let" > "in" => ast::Expression::ValueDecl { 134 | assigns: assigns.into_iter().map(|e| Box::new(e)).collect(), 135 | body: Some(block) 136 | }, 137 | // todo: change this because \n is a hack to make this work atm due to ambiguity like "let a = let b ..." 
138 | "let" "\n" => ast::Expression::ValueDecl { 139 | assigns: vec![Box::new(assign)], 140 | body: None 141 | }, 142 | ValueAssign, 143 | } 144 | 145 | pub ValueAssign: ast::Expression = { 146 | "=" => ast::Expression::ValueAssign { 147 | pat: p, 148 | expr: Box::new(e) 149 | }, 150 | "=" => ast::Expression::TagAssign { 151 | tag: pat, 152 | expr: tag, 153 | } 154 | } 155 | 156 | // (Main)Expression and SubExpression are a solution to remove abiguity issues with FnCall 157 | // for exampe: "add a b" could be seen as able to be parsed multiple ways { add(a, b) or add(a(b)) } 158 | // what this means is that FnCall can't directly have another FnCall in it without some other enclosure. 159 | pub Expression: ast::Expression = { 160 | SubExpression, 161 | FnCall, 162 | Match, 163 | ValueDecl, 164 | } 165 | 166 | pub SubExpression: ast::Expression = { 167 | OpCall, 168 | CircumfixCall, 169 | } 170 | 171 | pub Block: Vec> = { 172 | => vec![s], 173 | "\n" Indent Dedent => s 174 | } 175 | 176 | pub ExpressionStatement: Box = { 177 | "\n" => Box::new(e) 178 | } 179 | 180 | pub OpCall: ast::Expression = { 181 | => ast::Expression::OpCall { 182 | op: op, 183 | args: vec![ 184 | Box::new(l), 185 | Box::new(r), 186 | ] 187 | }, 188 | Atom 189 | } 190 | 191 | // todo: make this not expr, also make List not hardcoded myabe based but macro/ast 192 | pub CircumfixCall: ast::Expression = { 193 | "[" > "]" => ast::Expression::List(exprs.into_iter().map(|e| Box::new(e)).collect()), 194 | } 195 | 196 | // note(@bree): possibly future use, may remove. 
197 | pub Atom: ast::Expression = { 198 | "(" ")" "::" => ast::Expression::TypeDecl { 199 | ty: ty, 200 | expr: Box::new(expr), 201 | }, 202 | "(" ")" => e, 203 | Literal, 204 | } 205 | 206 | // FnCall has a rather ambiguous grammar 207 | // any change to the grammer has a possibility of conflicting with FnCall 208 | pub FnCall: ast::Expression = { 209 | => ast::Expression::FnCall { 210 | name: name, 211 | args: args 212 | } 213 | } 214 | 215 | pub TagDecl: ast::Tag = { 216 | "tag" => expr 217 | } 218 | 219 | pub TagExpression: ast::Tag = { 220 | TagOpCall, 221 | } 222 | 223 | pub TagOpCall: ast::Tag = { 224 | => ast::Tag::OpCall { 225 | op: op, 226 | args: vec![Box::new(l), Box::new(r)] 227 | }, 228 | TagAtom, 229 | } 230 | 231 | pub TagAtom: ast::Tag = { 232 | "(" ")" => e, 233 | TagLiteral 234 | } 235 | 236 | pub TagLiteral: ast::Tag = { 237 | "*" => ast::Tag::PrimaryIdentifier(name), 238 | Identifier => ast::Tag::Identifier(<>) 239 | } 240 | 241 | pub TagAssign: ast::Tag = { 242 | "#{" "}" => ast::Tag::Assign { 243 | pats: pats.into_iter().map(|p| Box::new(p)).collect(), 244 | } 245 | } 246 | 247 | pub TagPattern: ast::Tag = { 248 | TagLiteral 249 | } 250 | 251 | pub Op: ast::OpSymbol = { 252 | "+" => ast::OpSymbol::Plus, 253 | "-" => ast::OpSymbol::Minus, 254 | "*" => ast::OpSymbol::Star, 255 | "/" => ast::OpSymbol::ForwardSlash, 256 | "<" => ast::OpSymbol::LAngleBracket, 257 | ">" => ast::OpSymbol::RAngleBracket, 258 | "^" => ast::OpSymbol::Circumflex, 259 | } 260 | 261 | pub Literal: ast::Expression = { 262 | Integer => ast::Expression::Integer(<>), 263 | Identifier => ast::Expression::Identifier(<>), 264 | "String" => ast::Expression::StringLiteral(<>) 265 | } 266 | 267 | pub Integer: BigInt = { 268 | "Integer" 269 | } 270 | 271 | pub Identifier: String = { 272 | "Identifier" 273 | } 274 | 275 | NonEmptyListOf: Vec = { 276 | Seperator)*> => { 277 | let mut values = values; 278 | values.push(value); 279 | values 280 | } 281 | } 282 | 283 | ListOf: Vec = { 
284 | Seperator)*> => match value { 285 | None => values, 286 | Some(value) => { 287 | let mut values = values; 288 | values.push(value); 289 | values 290 | } 291 | } 292 | } 293 | 294 | extern { 295 | type Location = usize; 296 | type Error = String; 297 | 298 | enum Token { 299 | "Identifier" => Token::Identifier(), 300 | "Float" => Token::Float(), 301 | "Integer" => Token::Integer(), 302 | "String" => Token::StringLiteral(), 303 | "match" => Token::Match, 304 | "let" => Token::Let, 305 | "in" => Token::In, 306 | "tag" => Token::Tag, 307 | "=" => Token::Equal, 308 | ".." => Token::DotDot, 309 | "::" => Token::ColonColon, 310 | "**" => Token::StarStar, 311 | "=>" => Token::LargeArrowRight, 312 | "->" => Token::SmallArrowRight, 313 | "#{" => Token::TagStart, 314 | "Whitespace" => Token::Whitespace, 315 | // "Symbol" => Token::Symbol(), 316 | // indentation 317 | Indent => Token::Indent, 318 | Dedent => Token::Dedent, 319 | "\n" => Token::Newline, 320 | // symbols 321 | "(" => Token::Symbol('('), 322 | ")" => Token::Symbol(')'), 323 | "<" => Token::Symbol('<'), 324 | ">" => Token::Symbol('>'), 325 | "[" => Token::Symbol('['), 326 | "]" => Token::Symbol(']'), 327 | "{" => Token::Symbol('{'), 328 | "}" => Token::Symbol('}'), 329 | "+" => Token::Symbol('+'), 330 | "-" => Token::Symbol('-'), 331 | "*" => Token::Symbol('*'), 332 | "/" => Token::Symbol('/'), 333 | "_" => Token::Symbol('_'), 334 | ";" => Token::Symbol(';'), 335 | "," => Token::Symbol(','), 336 | "^" => Token::Symbol('^'), 337 | } 338 | } 339 | -------------------------------------------------------------------------------- /lib/parser/src/token.rs: -------------------------------------------------------------------------------- 1 | // 2 | // parser - snowflake's parser 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla public 6 | // license, v. 2.0. 
if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 8 | // 9 | 10 | use logos::Logos; 11 | use num_bigint::BigInt; 12 | 13 | fn lex_char(lex: &mut logos::Lexer) -> Option { 14 | lex.source().chars().nth(lex.span().start) 15 | } 16 | 17 | #[derive(Logos, Clone, Debug, PartialEq)] 18 | pub enum Token { 19 | #[regex("[a-zA-Z][a-zA-Z1-9_]*", |lex| lex.slice().parse())] 20 | Identifier(String), 21 | 22 | #[regex(r"[0-9]+\.[0-9]+", |lex| lex.slice().parse())] 23 | Float(f64), 24 | 25 | #[regex("[0-9][0-9_]*", |lex| lex.slice().parse())] 26 | Integer(BigInt), 27 | // todo: remove "" 28 | #[regex("\"[^\"]+\"", |lex| lex.slice().parse())] 29 | StringLiteral(String), 30 | 31 | // replaced with inserted tokens 32 | #[regex("\n( )*", |lex| ((lex.slice().len() - 1) / 2))] 33 | Indentation(usize), 34 | 35 | #[token("match")] 36 | Match, 37 | 38 | #[token("let")] 39 | Let, 40 | 41 | #[token("in")] 42 | In, 43 | 44 | #[token("tag")] 45 | Tag, 46 | 47 | // todo: eventually give proper names to some of these 48 | // "non-symbol" character combinations 49 | #[token("=")] 50 | Equal, 51 | 52 | #[token("::")] 53 | ColonColon, 54 | 55 | #[token("..")] 56 | DotDot, 57 | 58 | #[token("**")] 59 | StarStar, 60 | 61 | #[token("=>")] 62 | LargeArrowRight, 63 | 64 | #[token("->")] 65 | SmallArrowRight, 66 | 67 | // todo: remove the need for this 68 | #[token("#{")] 69 | TagStart, 70 | 71 | #[regex(r"\s", logos::skip)] 72 | Whitespace, 73 | 74 | #[regex(r"[!-/:-@\[-`{-~()_<>]", lex_char)] 75 | Symbol(char), 76 | 77 | #[regex(r".", lex_char, priority = 0)] 78 | Unknown(char), 79 | 80 | #[error] 81 | LexError, 82 | 83 | // inserted tokens 84 | Newline, 85 | Indent, 86 | Dedent, 87 | Error(String), 88 | } 89 | 90 | #[cfg(test)] 91 | mod tests { 92 | use super::*; 93 | use indoc::indoc; 94 | use Token::*; 95 | 96 | #[test] 97 | fn lex_test() { 98 | let source = indoc! 
{" 99 | fib :: isize -> isize 100 | fib n => match n => 101 | ..2 => n 102 | _ => (fib n - 1) + (fib n - 2) 103 | 104 | main => 105 | println \"Hello World!\" 106 | println fib 5 107 | "}; 108 | let tokens: Vec<_> = Token::lexer(source).collect(); 109 | assert_eq!( 110 | tokens, 111 | vec![ 112 | Identifier(String::from("fib")), 113 | ColonColon, 114 | Identifier(String::from("isize")), 115 | SmallArrowRight, 116 | Identifier(String::from("isize")), 117 | Indentation(0), 118 | Identifier(String::from("fib")), 119 | Identifier(String::from("n")), 120 | LargeArrowRight, 121 | Match, 122 | Identifier(String::from("n")), 123 | LargeArrowRight, 124 | Indentation(1), 125 | DotDot, 126 | Integer(BigInt::from(2)), 127 | LargeArrowRight, 128 | Identifier(String::from("n")), 129 | Indentation(1), 130 | Symbol('_'), 131 | LargeArrowRight, 132 | Symbol('('), 133 | Identifier(String::from("fib")), 134 | Identifier(String::from("n")), 135 | Symbol('-'), 136 | Integer(BigInt::from(1)), 137 | Symbol(')'), 138 | Symbol('+'), 139 | Symbol('('), 140 | Identifier(String::from("fib")), 141 | Identifier(String::from("n")), 142 | Symbol('-'), 143 | Integer(BigInt::from(2)), 144 | Symbol(')'), 145 | // the final newline is from indoc 146 | Indentation(0), 147 | Indentation(0), 148 | Identifier(String::from("main")), 149 | LargeArrowRight, 150 | Indentation(1), 151 | Identifier(String::from("println")), 152 | StringLiteral(String::from("\"Hello World!\"")), 153 | Indentation(1), 154 | Identifier(String::from("println")), 155 | Identifier(String::from("fib")), 156 | Integer(BigInt::from(5)), 157 | Indentation(0), 158 | ] 159 | ) 160 | } 161 | 162 | #[test] 163 | fn lex_indent_test() { 164 | let source = indoc! 
{" 165 | block => 166 | 123 167 | abc 168 | 123 169 | in 170 | #{} 171 | tag 172 | "}; 173 | let tokens: Vec<_> = Token::lexer(source).collect(); 174 | assert_eq!( 175 | tokens, 176 | vec![ 177 | Identifier(String::from("block")), 178 | LargeArrowRight, 179 | Indentation(1), 180 | Integer(BigInt::from(123)), 181 | Indentation(1), 182 | Identifier(String::from("abc")), 183 | Indentation(1), 184 | Integer(BigInt::from(123)), 185 | Indentation(1), 186 | In, 187 | Indentation(1), 188 | TagStart, 189 | Symbol('}'), 190 | Indentation(1), 191 | Tag, 192 | Indentation(0), 193 | ] 194 | ) 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /lib/tag/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tag" 3 | version = "0.1.0" 4 | authors = ["the snowflake authors "] 5 | edition = "2018" 6 | license = "MPL-2.0" 7 | 8 | [dependencies] 9 | id-arena = "^2" 10 | sdset = "^0.4" 11 | thiserror = "^1" 12 | -------------------------------------------------------------------------------- /lib/tag/src/lib.rs: -------------------------------------------------------------------------------- 1 | // 2 | // tag - snowflake's tagging library backend 3 | // 4 | // copyright (c) 2020 the snowflake authors 5 | // this source code form is subject to the terms of the mozilla public 6 | // license, v. 2.0. if a copy of the mpl was not distributed with this 7 | // file, you can obtain one at http://mozilla.org/MPL/2.0/. 8 | // 9 | 10 | //! The library behind the tag system that backs snowflake 11 | //! 12 | //! # What is a tag? 13 | //! 14 | //! A tag is, in essence, a [mathematical set] with a few changes. The most important of these 15 | //! changes is that there are two kinds: _primary tags_ and _secondary tags_. 16 | //! 17 | //! A _primary tag_ is equivalent to a mathematical sets in all regards, with one exception 18 | //! documented below. 
A _secondary tag_ is the same but without a uniqueness restriction. 19 | //! 20 | //! There is one more change, and that is that due to how tags work (they're collections of 21 | //! bindings from a name to a value), the uniqueness restriction is only enforced on the name of 22 | //! the binding, and not at all on the value 23 | //! 24 | //! # I don't understand this. Can you explain it a little simpler? 25 | //! 26 | //! If you've ever used a [venn diagram], they can be visualized in the same way. Tag intersections 27 | //! are where the components overlap, unions are any two components combined, symmetric difference 28 | //! (xor) is everything that doesn't overlap, and so on... 29 | //! 30 | //! # How do I use this? 31 | //! 32 | //! todo 33 | //! 34 | //! [mathematical set]: https://en.wikipedia.org/wiki/Set_theory 35 | //! [venn diagram]: https://en.wikipedia.org/wiki/Venn_diagram 36 | 37 | #![allow(clippy::cognitive_complexity)] 38 | #![warn(clippy::cargo_common_metadata)] 39 | #![warn(clippy::dbg_macro)] 40 | #![warn(clippy::explicit_deref_methods)] 41 | #![warn(clippy::filetype_is_file)] 42 | #![warn(clippy::imprecise_flops)] 43 | #![warn(clippy::large_stack_arrays)] 44 | #![warn(clippy::todo)] 45 | #![warn(clippy::unimplemented)] 46 | #![deny(clippy::await_holding_lock)] 47 | #![deny(clippy::cast_lossless)] 48 | #![deny(clippy::clone_on_ref_ptr)] 49 | #![deny(clippy::doc_markdown)] 50 | #![deny(clippy::empty_enum)] 51 | #![deny(clippy::enum_glob_use)] 52 | #![deny(clippy::exit)] 53 | #![deny(clippy::explicit_into_iter_loop)] 54 | #![deny(clippy::explicit_iter_loop)] 55 | #![deny(clippy::fallible_impl_from)] 56 | #![deny(clippy::inefficient_to_string)] 57 | #![deny(clippy::large_digit_groups)] 58 | #![deny(clippy::wildcard_dependencies)] 59 | #![deny(clippy::wildcard_imports)] 60 | #![deny(clippy::unused_self)] 61 | #![deny(clippy::single_match_else)] 62 | #![deny(clippy::option_option)] 63 | #![deny(clippy::mut_mut)] 64 | #![feature(bool_to_option)] 65 | 
#![feature(const_fn)] 66 | #![feature(decl_macro)] 67 | #![feature(concat_idents)] 68 | #![feature(or_patterns)] 69 | 70 | use id_arena::{Arena, ArenaBehavior, DefaultArenaBehavior}; 71 | use sdset::{duo::OpBuilder, Error as SdsetError, Set, SetOperation}; 72 | use std::{borrow::Cow, clone::Clone, collections::hash_map::HashMap}; 73 | use thiserror::Error; 74 | 75 | /// A builder-like type, used in construction of a [`Binding`] 76 | /// 77 | /// [`Binding`]: ./struct.Binding.html 78 | #[derive(Debug, Default)] 79 | pub struct BindingBuilder<'a, T> 80 | where 81 | T: Default + Clone, 82 | { 83 | pub(crate) binding: Binding<'a, T>, 84 | pub(crate) tags: Vec>, 85 | } 86 | 87 | impl<'a, T> BindingBuilder<'a, T> 88 | where 89 | T: Default + Clone, 90 | { 91 | /// Sets the name portion of the [`Binding`] 92 | /// 93 | /// [`Binding`]: ./struct.Binding.html 94 | pub fn set_name(&mut self, name: Cow<'a, str>) -> &mut Self { 95 | self.binding.name = name; 96 | self 97 | } 98 | 99 | /// Sets the value portion of the [`Binding`] 100 | /// 101 | /// [`Binding`]: ./struct.Binding.html 102 | pub fn set_value(&mut self, value: T) -> &mut Self { 103 | self.binding.value = value; 104 | self 105 | } 106 | 107 | /// Adds the [`Binding`] to a [`Tag`] by its [`TagName`] 108 | /// 109 | /// [`Binding`]: ./struct.Binding.html 110 | /// [`Tag`]: ./enum.Tag.html 111 | /// [`TagName`]: ./enum.TagName.html 112 | pub fn add_tag(&mut self, tag: TagName<'a>) -> &mut Self { 113 | self.tags.push(tag); 114 | self 115 | } 116 | 117 | /// Removes the provided [`TagName`] from the [`Binding`]'s current [`Tag`]s 118 | /// 119 | /// [`TagName`]: ./enum.TagName.html 120 | /// [`Binding`]: ./struct.Binding.html 121 | /// [`Tag`]: ./enum.Tag.html 122 | pub fn remove_tag(mut self, tag: TagName<'a>) -> Self { 123 | self.tags = self.tags.into_iter().filter(|t| *t != tag).collect(); 124 | self 125 | } 126 | } 127 | 128 | /// A relation between a string and its corresponding value. 
The string is considered to be the 129 | /// uniqueness specifier, the value has no play in equality 130 | #[derive(Debug, Default, Clone)] 131 | pub struct Binding<'a, T> 132 | where 133 | T: Default + Clone, 134 | { 135 | pub(crate) name: Cow<'a, str>, 136 | pub(crate) value: T, 137 | } 138 | 139 | impl<'a, T> PartialEq for Binding<'a, T> 140 | where 141 | T: Default + Clone, 142 | { 143 | fn eq(&self, other: &Self) -> bool { 144 | self.name == other.name 145 | } 146 | } 147 | 148 | impl<'a, T: Default + Clone> Eq for Binding<'a, T> {} 149 | 150 | /// The state of a group of [`Tag`]s. e.g. is it composed of either [`Tag::Primary`]s or 151 | /// [`Tag::Secondary`], or rather a mix of the two instead? 152 | /// 153 | /// [`Tag`]: ./enum.Tag.html 154 | /// [`Tag::Primary`]: ./enum.Tag.html#variant.Primary 155 | /// [`Tag::Secondary`]: ./enum.Tag.html#variant.Secondary 156 | #[derive(Debug, Eq, PartialEq, Clone)] 157 | pub enum TagGroupComposition { 158 | Primary, 159 | PrimaryAndSecondary, 160 | Secondary, 161 | SecondaryAndPrimary, 162 | } 163 | 164 | /// A tag's name. Used to key names to the corresponding tags in a [`HashMap`] without special 165 | /// syntax (i.e. `*`, which is used to denote a primary one in snowflake itself) 166 | /// 167 | /// [`HashMap`]: https://doc.rust-lang.org/nightly/std/collections/struct.HashMap.html 168 | #[derive(Debug, Eq, PartialEq, Hash, Clone)] 169 | pub enum TagName<'a> { 170 | Primary(Cow<'a, str>), 171 | Secondary(Cow<'a, str>), 172 | } 173 | 174 | /// A tag. Primary tags are equivalent to mathematical sets, secondary tags are the same but 175 | /// without the uniqueness restriction. 
Within the contained [`Binding`], only the [`name`] must be 176 | /// unique 177 | /// 178 | /// Note: there is no validation done on the names of the contained bindings; the task of ensuring 179 | /// name validity is up to you 180 | /// 181 | /// [`Binding`]: ./struct.Binding.html 182 | /// [`name`]: ./struct.Binding.html#structfield.name 183 | #[derive(Debug, PartialEq, Eq, Clone)] 184 | pub enum Tag<'a, T> 185 | where 186 | T: Default + Clone, 187 | { 188 | Primary(Vec<> as ArenaBehavior>::Id>), 189 | Secondary(Vec<> as ArenaBehavior>::Id>), 190 | } 191 | 192 | impl<'a, T> Tag<'a, T> 193 | where 194 | T: Default + Clone, 195 | { 196 | /// Retrieves a reference to the inner vector of the [`Tag`] as a [`Set`]. If the vector is not 197 | /// sorted, then it will fail. Because of this, it is recommended to call [`Tag::sort`] before 198 | /// calling this method 199 | /// 200 | /// [`Tag`]: ./enum.Tag.html 201 | /// [`Set`]: https://docs.rs/sdset/0.4.0/sdset/set/struct.Set.html 202 | /// [`Tag::sort`]: ./enum.Tag.html#method.sort 203 | pub fn as_set( 204 | &self, 205 | ) -> Result<&Set<> as ArenaBehavior>::Id>, SdsetError> { 206 | Set::new(self.as_slice()) 207 | } 208 | 209 | /// Retrieves a reference to the inner vector of the [`Tag`] 210 | /// 211 | /// [`Tag`]: ./enum.Tag.html 212 | pub fn as_slice(&self) -> &[> as ArenaBehavior>::Id] { 213 | match self { 214 | Tag::Primary(s) => s, 215 | Tag::Secondary(s) => s, 216 | } 217 | } 218 | 219 | /// Retrieves a mutable reference to the inner vector of the [`Tag`] 220 | /// 221 | /// [`Tag`]: ./enum.Tag.html 222 | pub fn as_mut_slice( 223 | &mut self, 224 | ) -> &mut [> as ArenaBehavior>::Id] { 225 | match self { 226 | Tag::Primary(s) => s, 227 | Tag::Secondary(s) => s, 228 | } 229 | } 230 | 231 | /// Sorts the [`Tag`]'s contents. 
Primarily useful before calling [`Tag::set`] 232 | /// 233 | /// [`Tag`]: ./enum.Tag.html 234 | /// [`Tag::set`]: ./enum.Tag.html#method.set 235 | pub fn sort(&mut self) { 236 | self.as_mut_slice().sort(); 237 | } 238 | } 239 | 240 | /// An enumeration over possible operations for a [`UniverseOperationBuilder`] to use 241 | /// 242 | /// [`UniverseOperationBuilder`]: ./struct.UniverseOperationBuilder.html 243 | #[derive(Debug, Eq, PartialEq, Clone)] 244 | pub enum UniverseOperationOp { 245 | Union, 246 | Intersection, 247 | Difference, 248 | SymmetricDifference, 249 | } 250 | 251 | /// A builder type for a [`UniverseOperationOp`] 252 | /// 253 | /// [`UniverseOperationOp`]: ./struct.UniverseOperationOp.html 254 | #[derive(Debug, Default)] 255 | pub struct UniverseOperationBuilder<'a> { 256 | tag_names: Option<(TagName<'a>, TagName<'a>)>, 257 | op: Option, 258 | } 259 | 260 | impl<'a> UniverseOperationBuilder<'a> { 261 | pub fn sets(&mut self, tags: (TagName<'a>, TagName<'a>)) -> &mut Self { 262 | self.tag_names = Some(tags); 263 | self 264 | } 265 | 266 | pub fn set_operation(&mut self, op: UniverseOperationOp) -> &mut Self { 267 | self.op = Some(op); 268 | self 269 | } 270 | } 271 | 272 | /// A builder-like type, used to ease in the creation of the [`Universe`] type 273 | /// 274 | /// [`Universe`]: ./struct.Universe.html 275 | #[derive(Debug, Default)] 276 | pub struct UniverseBuilder { 277 | pub(crate) tag_hashmap_capacity: Option, 278 | pub(crate) binding_arena_capacity: Option, 279 | } 280 | 281 | impl UniverseBuilder { 282 | /// "Builds" the builder type, returning a [`Universe`] 283 | /// 284 | /// [`Universe`]: ./struct.Universe.html 285 | fn build<'a, T>(&mut self) -> Universe<'a, T> 286 | where 287 | T: Default + Clone, 288 | { 289 | Universe { 290 | bindings: self 291 | .binding_arena_capacity 292 | .map_or_else(|| Arena::new(), |capacity| Arena::with_capacity(capacity)), 293 | tags: self.tag_hashmap_capacity.map_or_else( 294 | || HashMap::new(), 295 | 
|capacity| HashMap::with_capacity(capacity), 296 | ), 297 | } 298 | } 299 | 300 | /// Sets the number of elements to reserve capacity for in the tag [`HashMap`] 301 | /// 302 | /// [`HashMap`]: https://doc.rust-lang.org/nightly/std/collections/struct.HashMap.html 303 | pub fn with_tag_hashmap_capacity(&mut self, capacity: usize) -> &mut Self { 304 | self.tag_hashmap_capacity = Some(capacity); 305 | self 306 | } 307 | 308 | /// Sets the number of elements to reserve capacity for in the binding [`Arena`] 309 | /// 310 | /// [`Arena`]: https://docs.rs/id-arena/2.2.1/id_arena/struct.Arena.html 311 | pub fn with_binding_arena_capacity(&mut self, capacity: usize) -> &mut Self { 312 | self.binding_arena_capacity = Some(capacity); 313 | self 314 | } 315 | } 316 | 317 | /// A reference to an entry in a [`Universe`] 318 | /// 319 | /// [`Universe`]: ./struct.Universe.html 320 | #[derive(Clone, Eq, PartialEq, Hash)] 321 | pub struct UniverseEntry<'a, T> 322 | where 323 | T: Default + Clone, 324 | { 325 | pub binding: > as ArenaBehavior>::Id, 326 | pub tags: Vec>, 327 | } 328 | 329 | impl<'a, T> UniverseEntry<'a, T> 330 | where 331 | T: Default + Clone, 332 | { 333 | // /// Convert the [`UniverseEntry`] to an owned 334 | } 335 | 336 | /// A list of possible errors that may be encountered while working with a [`Universe`] 337 | /// 338 | /// [`Universe`]: ./struct.Universe.html 339 | #[non_exhaustive] 340 | #[derive(Error, Debug)] 341 | pub enum UniverseError { 342 | /// An error returned if such a binding already exists inside of the [`Universe`] 343 | /// 344 | /// Note: this does not contain a [`UniverseEntry`] of the existing binding due to the effort 345 | /// required to generate one 346 | /// 347 | /// [`Universe`]: ./struct.Universe.html 348 | #[error("The provided binding name is already in use")] 349 | BindingAlreadyExists, 350 | 351 | /// An error returned if no [`Tag`] corresponds to the provided [`TagName`] 352 | /// 353 | /// [`Tag`]: ./enum.Tag.html 354 | /// 
[`TagName`]: ./enum.TagName.html 355 | #[error("The provided TagName has no corresponding Tag")] 356 | InvalidTagName, 357 | 358 | /// An error returned if no [`Tag`]s were provided to a [`UniverseOperationBuilder`] 359 | /// 360 | /// [`Tag`]: ./enum.Tag.html 361 | /// [`UniverseOperationBuilder`]: ./struct.UniverseOperationBuilder.html 362 | #[error("Not enough Tags were provided")] 363 | NoTagsProvided, 364 | 365 | /// An error returned if no [`UniverseOperationOp`] was provided to a 366 | /// [`UniverseOperationBuilder`] 367 | /// 368 | /// [`UniverseOperationOp`]: ./enum.UniverseOperationOp.html 369 | /// [`UniverseOperationBuilder`]: ./struct.UniverseOperationBuilder.html 370 | #[error("No UniverseOperationOp was provided")] 371 | NoOperationProvided, 372 | 373 | /// An error that may be encountered while working with the sdset library 374 | #[error("An error was encountered while using the `sdset` library")] 375 | SdsetError(#[from] SdsetError), 376 | } 377 | 378 | /// A collection of [`Tag`]s and their [`Binding`]s 379 | /// 380 | /// [`Tag`]: ./enum.Tag.html 381 | /// [`Binding`]: ./struct.Binding.html 382 | #[derive(Debug, Default, Eq, PartialEq)] 383 | pub struct Universe<'a, T> 384 | where 385 | T: Default + Clone, 386 | { 387 | bindings: Arena, DefaultArenaBehavior>>, 388 | tags: HashMap, Tag<'a, T>>, 389 | } 390 | 391 | impl<'a, T> Universe<'a, T> 392 | where 393 | T: Default + Clone, 394 | { 395 | /// Creates a new [`Universe`] using a [`UniverseBuilder`]. 
If you would rather not use a 396 | /// builder, a [`Universe`] can be initialized using default fields using the 397 | /// [`Default::default`] trait method 398 | /// 399 | /// [`Universe`]: ./struct.Universe.html 400 | /// [`UniverseBuilder`]: ./struct.UniverseBuilder.html 401 | /// [`Default::default`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html#tymethod.default 402 | pub fn new(f: F) -> Self 403 | where 404 | F: FnOnce(&mut UniverseBuilder) -> &mut UniverseBuilder, 405 | { 406 | let mut builder = UniverseBuilder::default(); 407 | f(&mut builder).build() 408 | } 409 | 410 | /// Populates the [`Universe`] with a new value using a [`BindingBuilder`], returning a 411 | /// [`UniverseEntry`] referencing the given element. If a value with that name already exists, 412 | /// a [`UniverseError`] is returned. 413 | /// 414 | /// [`Universe`]: ./struct.Universe.html 415 | /// [`BindingBuilder`]: ./struct.ValueBuilder.html 416 | /// [`UniverseEntry`]: ./struct.UniverseEntry.html 417 | /// [`UniverseError`]: ./struct.UniverseError.html 418 | pub fn insert(&mut self, f: F) -> Result, UniverseError> 419 | where 420 | F: for<'b> FnOnce(&'b mut BindingBuilder<'a, T>) -> &'b mut BindingBuilder<'a, T>, 421 | { 422 | let mut builder = BindingBuilder::default(); 423 | f(&mut builder); 424 | 425 | let binding = self.bindings.alloc(builder.binding.clone()); 426 | 427 | for t in &builder.tags { 428 | match self.tags.get_mut(t) { 429 | Some(t) => match t { 430 | Tag::Primary(s) => { 431 | let bindings = &self.bindings; 432 | 433 | if s.iter() 434 | .find(|b| match bindings.get(**b) { 435 | Some(b) => b.name == builder.binding.name, 436 | None => false, 437 | }) 438 | .is_none() 439 | { 440 | s.push(binding); 441 | } else { 442 | return Err(UniverseError::BindingAlreadyExists); 443 | } 444 | } 445 | Tag::Secondary(s) => s.push(binding), 446 | }, 447 | None => match t { 448 | TagName::Primary(_) => { 449 | self.tags.insert((*t).clone(), 
Tag::Primary(vec![binding])); 450 | } 451 | TagName::Secondary(_) => { 452 | self.tags 453 | .insert((*t).clone(), Tag::Secondary(vec![binding])); 454 | } 455 | }, 456 | } 457 | } 458 | 459 | Ok(UniverseEntry { 460 | binding, 461 | tags: builder.tags, 462 | }) 463 | } 464 | 465 | /// Retrives a reference to a [`Binding`] from the internal [`Arena`] using the provided id 466 | /// 467 | /// [`Binding`]: ./struct.Binding.html 468 | /// [`Arena`]: https://docs.rs/id-arena/2.2.1/id_arena/struct.Arena.html 469 | pub fn get( 470 | &self, 471 | id: > as ArenaBehavior>::Id, 472 | ) -> Option<(&Cow<'a, str>, &T)> { 473 | self.bindings.get(id).map(|b| (&b.name, &b.value)) 474 | } 475 | 476 | /// Retrieves a mutable reference to a [`Binding`] from the internal [`Arena`] using the 477 | /// provided id 478 | /// 479 | /// [`Binding`]: ./struct.Binding.html 480 | /// [`Arena`]: https://docs.rs/id-arena/2.2.1/id_arena/struct.Arena.html 481 | pub fn get_mut( 482 | &mut self, 483 | id: > as ArenaBehavior>::Id, 484 | ) -> Option<(&Cow<'a, str>, &mut T)> { 485 | self.bindings.get_mut(id).map(|b| (&b.name, &mut b.value)) 486 | } 487 | 488 | /// Performs an operation over the [`Universe`] using a [`UniverseOperationBuilder`] and 489 | /// returns the resulting [`Tag`] 490 | /// 491 | /// [`Universe`]: ./struct.Universe.html 492 | /// [`UniverseOperationBuilder`]: ./struct.UniverseOperationBuilder.html 493 | /// [`Tag`]: ./enum.Tag.html 494 | pub fn execute(&mut self, f: F) -> Result, UniverseError> 495 | where 496 | F: for<'b> FnOnce( 497 | &'b mut UniverseOperationBuilder<'a>, 498 | ) -> &'b mut UniverseOperationBuilder<'a>, 499 | { 500 | let mut builder = UniverseOperationBuilder::default(); 501 | f(&mut builder); 502 | 503 | let tags = builder.tag_names.ok_or(UniverseError::NoTagsProvided)?; 504 | let sets = ( 505 | self.tags 506 | .get(&tags.0) 507 | .ok_or(UniverseError::InvalidTagName)? 
508 | .as_set()?, 509 | self.tags 510 | .get(&tags.1) 511 | .ok_or(UniverseError::InvalidTagName)? 512 | .as_set()?, 513 | ); 514 | 515 | macro generate_length_and_operation_match_clause($sets:ident, $op:ident) {{ 516 | let mut vec = Vec::new(); 517 | OpBuilder::new($sets.0, $sets.1) 518 | .$op() 519 | .extend_collection(&mut vec); 520 | vec 521 | }} 522 | 523 | let (set, op) = match builder.op { 524 | Some(UniverseOperationOp::Union) => ( 525 | generate_length_and_operation_match_clause!(sets, union), 526 | UniverseOperationOp::Union, 527 | ), 528 | Some(UniverseOperationOp::Intersection) => ( 529 | generate_length_and_operation_match_clause!(sets, intersection), 530 | UniverseOperationOp::Intersection, 531 | ), 532 | Some(UniverseOperationOp::Difference) => ( 533 | generate_length_and_operation_match_clause!(sets, difference), 534 | UniverseOperationOp::Difference, 535 | ), 536 | Some(UniverseOperationOp::SymmetricDifference) => ( 537 | generate_length_and_operation_match_clause!(sets, symmetric_difference), 538 | UniverseOperationOp::SymmetricDifference, 539 | ), 540 | None => return Err(UniverseError::NoOperationProvided), 541 | }; 542 | 543 | let group_composition = match tags { 544 | (TagName::Primary(_), TagName::Primary(_)) => TagGroupComposition::Primary, 545 | (TagName::Primary(_), TagName::Secondary(_)) => { 546 | TagGroupComposition::PrimaryAndSecondary 547 | } 548 | (TagName::Secondary(_), TagName::Secondary(_)) => TagGroupComposition::Secondary, 549 | (TagName::Secondary(_), TagName::Primary(_)) => { 550 | TagGroupComposition::SecondaryAndPrimary 551 | } 552 | }; 553 | 554 | Ok(match (group_composition, op) { 555 | ( 556 | TagGroupComposition::Primary 557 | | TagGroupComposition::PrimaryAndSecondary 558 | | TagGroupComposition::Secondary 559 | | TagGroupComposition::SecondaryAndPrimary, 560 | UniverseOperationOp::Union, 561 | ) => Tag::Secondary(set), 562 | 563 | ( 564 | TagGroupComposition::Primary 565 | | TagGroupComposition::PrimaryAndSecondary 566 
| | TagGroupComposition::SecondaryAndPrimary, 567 | UniverseOperationOp::Intersection, 568 | ) => Tag::Primary(set), 569 | (TagGroupComposition::Secondary, UniverseOperationOp::Intersection) => { 570 | Tag::Secondary(set) 571 | } 572 | 573 | ( 574 | TagGroupComposition::Primary | TagGroupComposition::PrimaryAndSecondary, 575 | UniverseOperationOp::Difference, 576 | ) => Tag::Primary(set), 577 | ( 578 | TagGroupComposition::Secondary | TagGroupComposition::SecondaryAndPrimary, 579 | UniverseOperationOp::Difference, 580 | ) => Tag::Secondary(set), 581 | 582 | (TagGroupComposition::Primary, UniverseOperationOp::SymmetricDifference) => { 583 | Tag::Primary(set) 584 | } 585 | ( 586 | TagGroupComposition::PrimaryAndSecondary 587 | | TagGroupComposition::Secondary 588 | | TagGroupComposition::SecondaryAndPrimary, 589 | UniverseOperationOp::SymmetricDifference, 590 | ) => Tag::Secondary(set), 591 | }) 592 | } 593 | } 594 | -------------------------------------------------------------------------------- /nix/default.nix: -------------------------------------------------------------------------------- 1 | { sources ? 
import ./sources.nix }: 2 | 3 | let 4 | mozilla-overlay = import sources.mozilla-overlay.outPath; 5 | in 6 | import sources.nixpkgs { 7 | overlays = [ 8 | mozilla-overlay 9 | ]; 10 | config = {}; 11 | } 12 | -------------------------------------------------------------------------------- /nix/sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "mozilla-overlay": { 3 | "branch": "master", 4 | "description": "mozilla related nixpkgs (extends nixos/nixpkgs repo)", 5 | "homepage": null, 6 | "owner": "mozilla", 7 | "repo": "nixpkgs-mozilla", 8 | "rev": "efda5b357451dbb0431f983cca679ae3cd9b9829", 9 | "sha256": "11wqrg86g3qva67vnk81ynvqyfj0zxk83cbrf0p9hsvxiwxs8469", 10 | "type": "tarball", 11 | "url": "https://github.com/mozilla/nixpkgs-mozilla/archive/efda5b357451dbb0431f983cca679ae3cd9b9829.tar.gz", 12 | "url_template": "https://github.com///archive/.tar.gz" 13 | }, 14 | "niv": { 15 | "branch": "master", 16 | "description": "Easy dependency management for Nix projects", 17 | "homepage": "https://github.com/nmattia/niv", 18 | "owner": "nmattia", 19 | "repo": "niv", 20 | "rev": "fad2a6cbfb2e7cdebb7cb0ad2f5cc91e2c9bc06b", 21 | "sha256": "0mghc1j0rd15spdjx81bayjqr0khc062cs25y5dcfzlxk4ynyc6m", 22 | "type": "tarball", 23 | "url": "https://github.com/nmattia/niv/archive/fad2a6cbfb2e7cdebb7cb0ad2f5cc91e2c9bc06b.tar.gz", 24 | "url_template": "https://github.com///archive/.tar.gz" 25 | }, 26 | "nixpkgs": { 27 | "branch": "nixpkgs-unstable", 28 | "description": "Nix Packages collection", 29 | "homepage": null, 30 | "owner": "NixOS", 31 | "repo": "nixpkgs", 32 | "rev": "f9567594d5af2926a9d5b96ae3bada707280bec6", 33 | "sha256": "0vr2di6z31c5ng73f0cxj7rj9vqvlvx3wpqdmzl0bx3yl3wr39y6", 34 | "type": "tarball", 35 | "url": "https://github.com/NixOS/nixpkgs/archive/f9567594d5af2926a9d5b96ae3bada707280bec6.tar.gz", 36 | "url_template": "https://github.com///archive/.tar.gz" 37 | } 38 | } 39 | 
-------------------------------------------------------------------------------- /nix/sources.nix: -------------------------------------------------------------------------------- 1 | # This file has been generated by Niv. 2 | 3 | let 4 | 5 | # 6 | # The fetchers. fetch_ fetches specs of type . 7 | # 8 | 9 | fetch_file = pkgs: spec: 10 | if spec.builtin or true then 11 | builtins_fetchurl { inherit (spec) url sha256; } 12 | else 13 | pkgs.fetchurl { inherit (spec) url sha256; }; 14 | 15 | fetch_tarball = pkgs: name: spec: 16 | let 17 | ok = str: ! builtins.isNull (builtins.match "[a-zA-Z0-9+-._?=]" str); 18 | # sanitize the name, though nix will still fail if name starts with period 19 | name' = stringAsChars (x: if ! ok x then "-" else x) "${name}-src"; 20 | in 21 | if spec.builtin or true then 22 | builtins_fetchTarball { name = name'; inherit (spec) url sha256; } 23 | else 24 | pkgs.fetchzip { name = name'; inherit (spec) url sha256; }; 25 | 26 | fetch_git = spec: 27 | builtins.fetchGit { url = spec.repo; inherit (spec) rev ref; }; 28 | 29 | fetch_local = spec: spec.path; 30 | 31 | fetch_builtin-tarball = name: throw 32 | ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. 33 | $ niv modify ${name} -a type=tarball -a builtin=true''; 34 | 35 | fetch_builtin-url = name: throw 36 | ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. 37 | $ niv modify ${name} -a type=file -a builtin=true''; 38 | 39 | # 40 | # Various helpers 41 | # 42 | 43 | # The set of packages used when specs are fetched using non-builtins. 44 | mkPkgs = sources: 45 | let 46 | sourcesNixpkgs = 47 | import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) {}; 48 | hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; 49 | hasThisAsNixpkgsPath = == ./.; 50 | in 51 | if builtins.hasAttr "nixpkgs" sources 52 | then sourcesNixpkgs 53 | else if hasNixpkgsPath && ! 
hasThisAsNixpkgsPath then 54 | import {} 55 | else 56 | abort 57 | '' 58 | Please specify either (through -I or NIX_PATH=nixpkgs=...) or 59 | add a package called "nixpkgs" to your sources.json. 60 | ''; 61 | 62 | # The actual fetching function. 63 | fetch = pkgs: name: spec: 64 | 65 | if ! builtins.hasAttr "type" spec then 66 | abort "ERROR: niv spec ${name} does not have a 'type' attribute" 67 | else if spec.type == "file" then fetch_file pkgs spec 68 | else if spec.type == "tarball" then fetch_tarball pkgs name spec 69 | else if spec.type == "git" then fetch_git spec 70 | else if spec.type == "local" then fetch_local spec 71 | else if spec.type == "builtin-tarball" then fetch_builtin-tarball name 72 | else if spec.type == "builtin-url" then fetch_builtin-url name 73 | else 74 | abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; 75 | 76 | # If the environment variable NIV_OVERRIDE_${name} is set, then use 77 | # the path directly as opposed to the fetched source. 
78 | replace = name: drv: 79 | let 80 | saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name; 81 | ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}"; 82 | in 83 | if ersatz == "" then drv else ersatz; 84 | 85 | # Ports of functions for older nix versions 86 | 87 | # a Nix version of mapAttrs if the built-in doesn't exist 88 | mapAttrs = builtins.mapAttrs or ( 89 | f: set: with builtins; 90 | listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) 91 | ); 92 | 93 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 94 | range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1); 95 | 96 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 97 | stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); 98 | 99 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 100 | stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); 101 | concatStrings = builtins.concatStringsSep ""; 102 | 103 | # fetchTarball version that is compatible between all the versions of Nix 104 | builtins_fetchTarball = { url, name, sha256 }@attrs: 105 | let 106 | inherit (builtins) lessThan nixVersion fetchTarball; 107 | in 108 | if lessThan nixVersion "1.12" then 109 | fetchTarball { inherit name url; } 110 | else 111 | fetchTarball attrs; 112 | 113 | # fetchurl version that is compatible between all the versions of Nix 114 | builtins_fetchurl = { url, sha256 }@attrs: 115 | let 116 | inherit (builtins) lessThan nixVersion fetchurl; 117 | in 118 | if lessThan nixVersion "1.12" then 119 | fetchurl { inherit url; } 120 | else 121 | fetchurl attrs; 122 | 123 | # Create the final "sources" from the config 124 | mkSources = config: 125 | mapAttrs ( 126 | name: spec: 127 | if 
builtins.hasAttr "outPath" spec 128 | then abort 129 | "The values in sources.json should not have an 'outPath' attribute" 130 | else 131 | spec // { outPath = replace name (fetch config.pkgs name spec); } 132 | ) config.sources; 133 | 134 | # The "config" used by the fetchers 135 | mkConfig = 136 | { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null 137 | , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile) 138 | , pkgs ? mkPkgs sources 139 | }: rec { 140 | # The sources, i.e. the attribute set of spec name to spec 141 | inherit sources; 142 | 143 | # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers 144 | inherit pkgs; 145 | }; 146 | 147 | in 148 | mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); } 149 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # snowflake 2 | 3 | a fast, low-level, and expressive programming language designed for minimal usage of hierarchy 4 | 5 | ## a brief history 6 | 7 | snowflake, the language, originally started as a collection of ideas to improve programming that 8 | somewhat resembled a mashup of go and rust (more heavily influenced by go at the time, but with the 9 | lower-level-ness of rust and some additional features superwhiskers, the author, thought would be 10 | nice to have in go). 11 | 12 | 13 | ![snowflake in 2018](https://256.sh/i/5drn734b.png) 14 | 15 | 16 | further on in conceptualization, it gradually shifted to be more and more rust-like, until the 17 | point where it became rust but with a few tweaks such as the removal of ownership to allow for 18 | fully manual memory management by default, and other extensions (that in hindsight wouldn't make 19 | sense to create what was essentially a fork to implement). 
20 | 21 | 22 | eventually, it shifted to become more like lisp (retaining strong typing and manually managed 23 | memory) and had some things, such as most types and other unnecessary (in superwhiskers' opinion) 24 | language items/features stripped out to make a really portable language. in addition, macros were 25 | buffed to add in an _all-powerful_ file-wide macro kind that could be used to implement alternative 26 | syntaxes on top of a lispy syntax to allow people to use the language the way they wanted 27 | 28 | 29 | ![snowflake but lispy](https://256.sh/i/v2936j95.png) 30 | 31 | ![snowflake but lispy (no methods)](https://256.sh/i/6tpe2hzh.png) 32 | 33 | 34 | later on, methods were removed (as seen above) and tagging was created/discovered/applied to the 35 | language. this discovery heavily influenced the language later on, as even though superwhiskers was 36 | hesitant to apply it everywhere at first, it gradually made its way in, creating the language you 37 | see today 38 | 39 | 40 | ![snowflake but ml](https://256.sh/i/6mjmmz7j.png) 41 | 42 | 43 | more recently, the lispy syntax was outright removed to simplify the parser and make it easier to 44 | implement, but alternative syntaxes are not completely gone, as they are now intended to be done 45 | using plugins. however, technically speaking, the current ml-like syntax you see now has existed 46 | since the lispy one, as it was intended to be the "default syntax", used in most cases as it would 47 | be drastically easier to work with (typed lisps are a pain). 48 | 49 | ## the language itself 50 | 51 | snowflake is a language designed from the start to be low level. ideally, it should take very 52 | little to port snowflake to a new platform than c (due to very little types existing in the 53 | language itself and because it assumes very little about the underlying platform). 
aside from that, 54 | other goals/features include (but are not limited to): 55 | 56 | - non-hierarchical programming (in both module system and type system) 57 | - speed (due to the expressiveness of macros and such + optimizations + low-level-ness, one can get 58 | more performance out of equivalent code in other languages without needing to rely on the 59 | implementation) 60 | - a primarily functional programming style (without sacrificing speed) 61 | - clean, easy to read syntax that should be familiar to users of other functional languages 62 | 63 | ## an explaination of tagging 64 | 65 | tagging is essentially applied set theory; it is based entirely upon single-layer collections of 66 | objects that can have operations applied to them in order to construct new tags. these operations 67 | can be listed as such: 68 | 69 | - intersection, which returns a tag containing all of the common items between the operands 70 | - union, which returns a tag containing all of the items in both sets regardless of presence 71 | - difference, which returns a tag containing the items in the first set minus the ones that exist 72 | in the second 73 | - symmetric difference/xor, which returns all of the items not in both sets 74 | 75 | aside from that, snowflake tweaks the set theory model to create two kinds of tags: primary and 76 | secondary tags. primary tags are just mathematical sets, and secondary tags are mathematical sets 77 | without a uniqueness restriction. there is also one more change, the sets exclusively contain 78 | name-value bindings (like a map/key-value store/whatever) and primary tags only need to be unique 79 | on the name 80 | 81 | if you don't understand this, you can visualize it like how set theory is taught really early on in 82 | grade school: as a [venn diagram](https://en.wikipedia.org/wiki/Venn_diagram), where intersections 83 | are the overlapping parts of components, unions are two components combined, etc... 
84 | 85 | ## the event system 86 | 87 | the snowflake event system is a portion of the standard library that is designed to cleanly 88 | abstract over additional concurrency systems as well as provide a model for how to implement 89 | concurrency in a potential "snowflake os" 90 | 91 | it loosely works like this 92 | - program has a central definition of event datatypes and the event handlers that take them 93 | - program starts with a "genesis" event 94 | - event handlers respond to this 95 | - event handlers then emit new events (yes, they can emit multiple) (they can also emit nothing and 96 | those chains die) 97 | - new event handlers respond to those 98 | - the cycle continues 99 | 100 | the idea behind it is that events are decoupled from their handlers and this would provide a more 101 | modular means of concurrency. it also avoids "coloring" functions because all handlers can be 102 | called like normal functions and can have their outputs chained to successive handlers in that 103 | chain of events. 
104 | 105 | ## a roadmap 106 | 107 | - implement a parser from scratch to improve error messages and make it less hacky 108 | - redo the interpreter to actually typecheck, then make it compile to bytecode for better 109 | performance 110 | - add in tag typing (tagging applied to compound types) 111 | - add in macros 112 | - consider implementing dependent typing around this time (go down to [literature](#literature) to 113 | see some references/examples of what we're looking at) 114 | - consider implementing algebraic effects 115 | - look at adding in a distinction between purity / impurity & safe / unsafe, potentially done 116 | without a language feature using algebraic effects 117 | - implement a compiler backend using qbe (see [literature](#literature)) 118 | - polish more of the core language, smoothing out rough edges and taking feedback 119 | - add a minimal standard library + core library and polish things 120 | - implement [the event system](#the-event-system) 121 | - selfhost and implement llvm post-selfhosting as well as port existing things depending on the 122 | rust impl 123 | - attempt to implement a jit 124 | - write a package manager 125 | 126 | ## literature 127 | 128 | ### compiler backends 129 | 130 | - [the qbe compiler backend](https://c9x.me/compile) 131 | 132 | ### dependent typing 133 | 134 | - [formality](https://github.com/moonad/formality) 135 | 136 | ## examples 137 | 138 | ### hello world 139 | 140 | ```snowflake 141 | def main => 142 | *println "hello, world" 143 | ``` 144 | 145 | (there isn't much else that isn't likely to change) 146 | 147 | ## links 148 | 149 | - [the repl.it team](https://repl.it/@snowflakelang) 150 | - [the discord guild](https://discord.gg/rBbfDEr) 151 | - [the telegram group](https://t.me/joinchat/GwKOHRzeLzT2Jktw_4SeVg) 152 | - [the github organization](https://github.com/snowflake-language) 153 | -------------------------------------------------------------------------------- /shell.nix: 
-------------------------------------------------------------------------------- 1 | { sources ? import ./nix/sources.nix 2 | , pkgs ? import ./nix { inherit sources; } }: 3 | 4 | pkgs.mkShell { 5 | name = "snowflake-shell"; 6 | 7 | buildInputs = with pkgs; [ 8 | latest.rustChannels.nightly.rust 9 | niv 10 | ]; 11 | } 12 | --------------------------------------------------------------------------------