├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── OLD_README.md ├── README.md ├── config ├── pp_names.txt └── pp_options.txt ├── docs ├── build_docs.md └── changelog.md └── src ├── cli.rs ├── env.rs ├── errors.rs ├── expr.rs ├── inductive.rs ├── level.rs ├── main.rs ├── name.rs ├── parser.rs ├── pretty ├── components.rs ├── mod.rs └── pretty_printer.rs ├── quot.rs ├── reduction.rs ├── tc.rs └── utils.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | callgrind* 4 | dots/* 5 | src/inductive/* 6 | src/local_ctx.rs 7 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | [[package]] 4 | name = "ansi_term" 5 | version = "0.11.0" 6 | source = "registry+https://github.com/rust-lang/crates.io-index" 7 | checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" 8 | dependencies = [ 9 | "winapi", 10 | ] 11 | 12 | [[package]] 13 | name = "atty" 14 | version = "0.2.13" 15 | source = "registry+https://github.com/rust-lang/crates.io-index" 16 | checksum = "1803c647a3ec87095e7ae7acfca019e98de5ec9a7d01343f611cf3152ed71a90" 17 | dependencies = [ 18 | "libc", 19 | "winapi", 20 | ] 21 | 22 | [[package]] 23 | name = "bitflags" 24 | version = "1.1.0" 25 | source = "registry+https://github.com/rust-lang/crates.io-index" 26 | checksum = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" 27 | 28 | [[package]] 29 | name = "byteorder" 30 | version = "1.3.2" 31 | source = "registry+https://github.com/rust-lang/crates.io-index" 32 | checksum = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" 33 | 34 | [[package]] 35 | name = "cc" 36 | version = "1.0.40" 37 | source = "registry+https://github.com/rust-lang/crates.io-index" 38 | checksum = 
"b548a4ee81fccb95919d4e22cfea83c7693ebfd78f0495493178db20b3139da7" 39 | 40 | [[package]] 41 | name = "cfg-if" 42 | version = "0.1.9" 43 | source = "registry+https://github.com/rust-lang/crates.io-index" 44 | checksum = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" 45 | 46 | [[package]] 47 | name = "clap" 48 | version = "2.33.0" 49 | source = "registry+https://github.com/rust-lang/crates.io-index" 50 | checksum = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" 51 | dependencies = [ 52 | "ansi_term", 53 | "atty", 54 | "bitflags", 55 | "strsim", 56 | "textwrap", 57 | "unicode-width", 58 | "vec_map", 59 | ] 60 | 61 | [[package]] 62 | name = "cloudabi" 63 | version = "0.0.3" 64 | source = "registry+https://github.com/rust-lang/crates.io-index" 65 | checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" 66 | dependencies = [ 67 | "bitflags", 68 | ] 69 | 70 | [[package]] 71 | name = "crossbeam-utils" 72 | version = "0.6.6" 73 | source = "registry+https://github.com/rust-lang/crates.io-index" 74 | checksum = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" 75 | dependencies = [ 76 | "cfg-if", 77 | "lazy_static", 78 | ] 79 | 80 | [[package]] 81 | name = "fxhash" 82 | version = "0.2.1" 83 | source = "registry+https://github.com/rust-lang/crates.io-index" 84 | checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" 85 | dependencies = [ 86 | "byteorder", 87 | ] 88 | 89 | [[package]] 90 | name = "hashbrown" 91 | version = "0.3.1" 92 | source = "registry+https://github.com/rust-lang/crates.io-index" 93 | checksum = "29fba9abe4742d586dfd0c06ae4f7e73a1c2d86b856933509b269d82cdf06e18" 94 | 95 | [[package]] 96 | name = "heck" 97 | version = "0.3.1" 98 | source = "registry+https://github.com/rust-lang/crates.io-index" 99 | checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" 100 | dependencies = [ 101 | "unicode-segmentation", 102 | ] 103 | 104 | [[package]] 
105 | name = "lazy_static" 106 | version = "1.3.0" 107 | source = "registry+https://github.com/rust-lang/crates.io-index" 108 | checksum = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14" 109 | 110 | [[package]] 111 | name = "libc" 112 | version = "0.2.62" 113 | source = "registry+https://github.com/rust-lang/crates.io-index" 114 | checksum = "34fcd2c08d2f832f376f4173a231990fa5aef4e99fb569867318a227ef4c06ba" 115 | 116 | [[package]] 117 | name = "lock_api" 118 | version = "0.3.1" 119 | source = "registry+https://github.com/rust-lang/crates.io-index" 120 | checksum = "f8912e782533a93a167888781b836336a6ca5da6175c05944c86cf28c31104dc" 121 | dependencies = [ 122 | "scopeguard", 123 | ] 124 | 125 | [[package]] 126 | name = "mimalloc-sys" 127 | version = "0.1.5" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | checksum = "b95f8e6d03de63db3537f5e0b59b9ca5247d8d42b1bd8b7db5f36ec49b885351" 130 | dependencies = [ 131 | "cc", 132 | "libc", 133 | ] 134 | 135 | [[package]] 136 | name = "mimallocator" 137 | version = "0.1.3" 138 | source = "registry+https://github.com/rust-lang/crates.io-index" 139 | checksum = "2d44fe4ebf6b538fcf39d9975c2b90bb3232d1ba8e8bffeacd004f27b20c577a" 140 | dependencies = [ 141 | "mimalloc-sys", 142 | ] 143 | 144 | [[package]] 145 | name = "nanoda" 146 | version = "0.1.3" 147 | dependencies = [ 148 | "crossbeam-utils", 149 | "fxhash", 150 | "hashbrown", 151 | "mimallocator", 152 | "parking_lot", 153 | "stacker", 154 | "structopt", 155 | ] 156 | 157 | [[package]] 158 | name = "parking_lot" 159 | version = "0.9.0" 160 | source = "registry+https://github.com/rust-lang/crates.io-index" 161 | checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252" 162 | dependencies = [ 163 | "lock_api", 164 | "parking_lot_core", 165 | "rustc_version", 166 | ] 167 | 168 | [[package]] 169 | name = "parking_lot_core" 170 | version = "0.6.2" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 
172 | checksum = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b" 173 | dependencies = [ 174 | "cfg-if", 175 | "cloudabi", 176 | "libc", 177 | "redox_syscall", 178 | "rustc_version", 179 | "smallvec", 180 | "winapi", 181 | ] 182 | 183 | [[package]] 184 | name = "proc-macro2" 185 | version = "0.4.30" 186 | source = "registry+https://github.com/rust-lang/crates.io-index" 187 | checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" 188 | dependencies = [ 189 | "unicode-xid", 190 | ] 191 | 192 | [[package]] 193 | name = "quote" 194 | version = "0.6.13" 195 | source = "registry+https://github.com/rust-lang/crates.io-index" 196 | checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" 197 | dependencies = [ 198 | "proc-macro2", 199 | ] 200 | 201 | [[package]] 202 | name = "redox_syscall" 203 | version = "0.1.56" 204 | source = "registry+https://github.com/rust-lang/crates.io-index" 205 | checksum = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" 206 | 207 | [[package]] 208 | name = "rustc_version" 209 | version = "0.2.3" 210 | source = "registry+https://github.com/rust-lang/crates.io-index" 211 | checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" 212 | dependencies = [ 213 | "semver", 214 | ] 215 | 216 | [[package]] 217 | name = "scopeguard" 218 | version = "1.0.0" 219 | source = "registry+https://github.com/rust-lang/crates.io-index" 220 | checksum = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" 221 | 222 | [[package]] 223 | name = "semver" 224 | version = "0.9.0" 225 | source = "registry+https://github.com/rust-lang/crates.io-index" 226 | checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" 227 | dependencies = [ 228 | "semver-parser", 229 | ] 230 | 231 | [[package]] 232 | name = "semver-parser" 233 | version = "0.7.0" 234 | source = "registry+https://github.com/rust-lang/crates.io-index" 235 | checksum = 
"388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" 236 | 237 | [[package]] 238 | name = "smallvec" 239 | version = "0.6.10" 240 | source = "registry+https://github.com/rust-lang/crates.io-index" 241 | checksum = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7" 242 | 243 | [[package]] 244 | name = "stacker" 245 | version = "0.1.5" 246 | source = "registry+https://github.com/rust-lang/crates.io-index" 247 | checksum = "fb79482f57cf598af52094ec4cc3b3c42499d3ce5bd426f2ac41515b7e57404b" 248 | dependencies = [ 249 | "cc", 250 | "cfg-if", 251 | "libc", 252 | "winapi", 253 | ] 254 | 255 | [[package]] 256 | name = "strsim" 257 | version = "0.8.0" 258 | source = "registry+https://github.com/rust-lang/crates.io-index" 259 | checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" 260 | 261 | [[package]] 262 | name = "structopt" 263 | version = "0.2.18" 264 | source = "registry+https://github.com/rust-lang/crates.io-index" 265 | checksum = "16c2cdbf9cc375f15d1b4141bc48aeef444806655cd0e904207edc8d68d86ed7" 266 | dependencies = [ 267 | "clap", 268 | "structopt-derive", 269 | ] 270 | 271 | [[package]] 272 | name = "structopt-derive" 273 | version = "0.2.18" 274 | source = "registry+https://github.com/rust-lang/crates.io-index" 275 | checksum = "53010261a84b37689f9ed7d395165029f9cc7abb9f56bbfe86bee2597ed25107" 276 | dependencies = [ 277 | "heck", 278 | "proc-macro2", 279 | "quote", 280 | "syn", 281 | ] 282 | 283 | [[package]] 284 | name = "syn" 285 | version = "0.15.44" 286 | source = "registry+https://github.com/rust-lang/crates.io-index" 287 | checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" 288 | dependencies = [ 289 | "proc-macro2", 290 | "quote", 291 | "unicode-xid", 292 | ] 293 | 294 | [[package]] 295 | name = "textwrap" 296 | version = "0.11.0" 297 | source = "registry+https://github.com/rust-lang/crates.io-index" 298 | checksum = 
"d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" 299 | dependencies = [ 300 | "unicode-width", 301 | ] 302 | 303 | [[package]] 304 | name = "unicode-segmentation" 305 | version = "1.3.0" 306 | source = "registry+https://github.com/rust-lang/crates.io-index" 307 | checksum = "1967f4cdfc355b37fd76d2a954fb2ed3871034eb4f26d60537d88795cfc332a9" 308 | 309 | [[package]] 310 | name = "unicode-width" 311 | version = "0.1.6" 312 | source = "registry+https://github.com/rust-lang/crates.io-index" 313 | checksum = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" 314 | 315 | [[package]] 316 | name = "unicode-xid" 317 | version = "0.1.0" 318 | source = "registry+https://github.com/rust-lang/crates.io-index" 319 | checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" 320 | 321 | [[package]] 322 | name = "vec_map" 323 | version = "0.8.1" 324 | source = "registry+https://github.com/rust-lang/crates.io-index" 325 | checksum = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" 326 | 327 | [[package]] 328 | name = "winapi" 329 | version = "0.3.7" 330 | source = "registry+https://github.com/rust-lang/crates.io-index" 331 | checksum = "f10e386af2b13e47c89e7236a7a14a086791a2b88ebad6df9bf42040195cf770" 332 | dependencies = [ 333 | "winapi-i686-pc-windows-gnu", 334 | "winapi-x86_64-pc-windows-gnu", 335 | ] 336 | 337 | [[package]] 338 | name = "winapi-i686-pc-windows-gnu" 339 | version = "0.4.0" 340 | source = "registry+https://github.com/rust-lang/crates.io-index" 341 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 342 | 343 | [[package]] 344 | name = "winapi-x86_64-pc-windows-gnu" 345 | version = "0.4.0" 346 | source = "registry+https://github.com/rust-lang/crates.io-index" 347 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 348 | -------------------------------------------------------------------------------- /Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "nanoda" 3 | version = "0.1.3" 4 | authors = ["ammkrn "] 5 | edition = "2018" 6 | 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [features] 10 | mimalloc = [] 11 | default = ["mimalloc"] 12 | 13 | [profile.release] 14 | opt-level=3 15 | #lto=true 16 | #debug = true 17 | 18 | [dependencies] 19 | crossbeam-utils = "0.6.6" 20 | fxhash = "0.2.1" 21 | hashbrown = "0.3.0" 22 | parking_lot = "0.9.0" 23 | structopt = "0.2.18" 24 | mimallocator = "0.1.3" 25 | stacker = "0.1.5" -------------------------------------------------------------------------------- /OLD_README.md: -------------------------------------------------------------------------------- 1 | ## About nanoda 2 | 3 | Enormous thanks to Leonard de Moura, Soonho Kong, Sebastian Ullrich, Gabriel Ebner, Floris van Doorn, Mario Carneiro, Kevin Buzzard, Chris Hughes, Patrick Massot, Jeremy Avigad, and Kenny Lau for their combined efforts in creating/documenting Lean, and/or for their willingness to share their knowledge with the unwashed masses on Lean's Zulip. 4 | 5 | This project is based on Gabriel Ebner's [trepplein](https://github.com/gebner/trepplein) 6 | 7 | --- 8 | 9 | Nanoda is a type checker for the Lean theorem prover, specifically its [export format](https://github.com/leanprover/lean/blob/master/doc/export_format.md). It includes a pretty printer and a command line interface. 10 | 11 | 12 | --- 13 | 14 | ### How to use 15 | 16 | ** As of 0.1.1, [mimalloc](https://github.com/microsoft/mimalloc.git) is the default global allocator, but you can disable this by passing the `--no-default-features` flag when running the executable. Thanks (again) to Sebastian Ullrich for this suggestion. 17 | 18 | 1. Install cargo (Rust's package manager) if you don't already have it. 19 | 2. Clone this repository. 20 | 3. 
From this repository's root folder, execute `cargo build --release` (it will be incredibly slow without the release flag, so don't forget that). 21 | 4. The built binary will be in /target/release/nanoda, so you can either run it from there (use `./nanoda --help` to see options), or you can run it through cargo, but the syntax is a little weird : `cargo run --release -- `. For example `cargo run --release -- --threads 8 --print mathlib_export.out` 22 | 23 | --- 24 | 25 | ## nanoda について 26 | 27 | Leonard de Moura, Soonho Kong, Sebastian Ullrich, Gabriel Ebner, Floris van Doorn, Mario Carneiro, Kevin Buzzard, Chris Hughes, Patrick Massot, Jeremy Avigad, と Kenny Lau にすごく感謝してます。 28 | 29 | 元々Gabriel Epnerの[trepplein](https://github.com/gebner/trepplein)を参照して作られたものです。 30 | 31 | 32 | このプロジェクトは Lean とうい証明支援システム・依存型プログラミング言語の型検査装置です。プリティープリンターもCLIも含む。 33 | 34 | 35 | ### 使い方 36 | 37 | 38 | ** バージョン0.1.1現在, デフォールトで用いられるアロケーターは[mimalloc](https://github.com/microsoft/mimalloc.git)ですが`--no-default-features`フラグを渡すことでmimallocの代わりにシステムのデフォールトが使える。 39 | 40 | 1. cargo (ラスト言語のパケージマネジャー)をインストールして下さい。 41 | 2. このリポジトリーをクローンして。 42 | 3. このリポのルートフォルダーから、`cargo build --release` にして下さい。`--release` の分がなければ非常に遅くなるので忘れないで下さい。 43 | 4. 作られた実行形式は /target/release/nanoda に位置しているはずですので、そこから普通のように実行出来ます(`./nanoda --help` で詳しいことが見える)。cargo でも実行できますが、構文はちょっと長たらしくなって : `cargo run --release -- ` っていうように、例えば `cargo run --release -- --threads 8 --print mathlib_export.out`。 44 | 45 | 46 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## THIS REPOSITORY IS DEPRECATED 4 | 5 | Active development of this project has moved to [nanoda_lib](https://github.com/ammkrn/nanoda_lib.git). 
Information about this version (the old one) can still be found in OLD_README.md 6 | 7 | * This version implements the older reduction strategy and hasn't been tuned for the sheer size of newer versions of mathlib. Please be aware that running this against recent builds of mathlib may result in an out of memory error. 8 | 9 | 10 | ## 非推奨通知 11 | 12 | このプロジェクトの開発は[nanoda_lib](https://github.com/ammkrn/nanoda_lib.git)へ移転しました。このバージョン(古いの)に関する情報は OLD_README.md でまだ読めます。 13 | -------------------------------------------------------------------------------- /config/pp_names.txt: -------------------------------------------------------------------------------- 1 | # Lines beginning with a hashtag are ignored as comments. 2 | # Place each definition's name on a new line. 3 | # Example : to print eq.rec and quot.lift, delete the hash tags in the lines below 4 | 5 | #eq.rec 6 | #quot.lift -------------------------------------------------------------------------------- /config/pp_options.txt: -------------------------------------------------------------------------------- 1 | # lines beginning with a hashtag are ignored as comments 2 | # options are written as they are in Lean. (set_option prefix is optional) 3 | # each option goes on a new line 4 | # an example which specifies the default settings can be seen below : 5 | 6 | pp.all false 7 | pp.implicit false 8 | pp.notation true 9 | pp.proofs true 10 | pp.locals_full_names false 11 | pp.indent 2 12 | pp.width 80 -------------------------------------------------------------------------------- /docs/build_docs.md: -------------------------------------------------------------------------------- 1 | Cargo (Rust's package manager) comes with a really amazing tool called [rustdoc](https://doc.rust-lang.org/rustdoc/index.html) that allows authors to write documentation in-line so that it annotates the code it's associated with and lays it out really nicely in the browser. 
To build this project's docs, go to the project root and execute `cargo doc --open`. -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | 2 | ## + 31/08/2019 3 | 4 | Made changes to allow the stack to grow when executing recursive definitional equality checks. This is rarely used, but is necessary to check definitions like `pi_gt_314` without experiencing stack overflow. Some other changes to related parts of the type checker are planned to bring it more in line with Lean 4 which will hopefully give some insight into how the stack growth can be alleviated. 5 | 6 | --- 7 | 8 | ## + 24/08/2019 9 | 10 | Switched to [mimalloc](https://github.com/microsoft/mimalloc.git) as the default global allocator. Can be disabled by pasing the `--no-default-features` flag when running the executable. -------------------------------------------------------------------------------- /src/cli.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use std::fs::read_to_string; 3 | 4 | use std::path::PathBuf; 5 | 6 | use parking_lot::RwLock; 7 | use structopt::StructOpt; 8 | 9 | 10 | use crate::name::{ Name, mk_anon }; 11 | use crate::pretty::pretty_printer::{ PrettyPrinter, PPOptions }; 12 | use crate::env::Env; 13 | 14 | /// A basic example 15 | #[derive(StructOpt, Debug)] 16 | #[structopt(raw(setting = "structopt::clap::AppSettings::ColoredHelp"))] 17 | #[structopt(name = "nanoda", 18 | about = "A type checker for the Lean theorem prover", 19 | author = "ammkrn", 20 | version = "0.0.1")] 21 | pub struct Opt { 22 | 23 | #[structopt(short = "f", long = "force")] 24 | pub force : bool, 25 | //A flag, true if used in the command line. Note doc comment will 26 | //be used for the help message of the flag. 
//Activate debug mode (currently does nothing)
#[structopt(short = "d", long = "debug")]
pub debug: bool,


/** tell r_type how many threads you want it to use.
Use `1` to check in serial, though r_type is
very much not optimized for serial execution.
Recommended : 4-8. */
#[structopt(short = "t", long = "threads", default_value = "4")]
pub num_threads : u64,

/** tell r_type you want to pretty print something; options and the
list of definitions to print are set in config files, called
`pretty_options.txt` and `pretty_names.txt`. r_type will look for these
both in the current working directory, and in an optional subdirectory
called `config`.
The names of the definitions you want pretty printed should be line separated
and are accepted in the same format as in Lean (IE has_add.rec).
The pretty printer options should also be line separated, and are also
accepted as in Lean. For example, to turn on implicits, r_type will accept
either `set_option pp.implicit true` or `pp.implicit true`.
Both of these files will ignore (as comments) lines beginning with `#` */
#[structopt(short = "p", long = "print")]
pub print : bool,

/** File(s) to type check. Passing only a filename will look in the
current directory. A full path will look for the file in the
specified location */
#[structopt(name = "FILE x N", parse(from_os_str))]
files: Vec<PathBuf>,
}

impl Opt {
    /// Attempt to read the contents of every file named on the command
    /// line (each resolved against the cwd when possible), failing on
    /// the first unreadable file.
    pub fn try_read_files(&self) -> Result<Vec<String>, std::io::Error> {
        self.files.iter().map(|x| try_read_cwd(x)).collect()
    }
}

/// Read `suggestion` relative to the current working directory; if the
/// cwd itself cannot be determined, fall back to reading the path
/// exactly as it was given.
fn try_read_cwd(suggestion : &PathBuf) -> Result<String, std::io::Error> {
    match std::env::current_dir() {
        Err(_) => read_to_string(suggestion),
        Ok(mut path) => {
            path.push(suggestion.clone());
            read_to_string(path)
        }
    }
}

// I'll fix these at some point; at the moment we're (very)
// fast and loose with the parsing, and parsing fails silently.
/// `true` iff the line contains the literal substring "true"; anything
/// else (including malformed input) silently reads as `false`.
fn find_true_else_false(s : &str) -> bool {
    // collapsed from `if s.contains("true") { return true } else { false }`
    // (clippy::needless_bool / needless_return); behavior is identical.
    s.contains("true")
}

/// Return the first whitespace-separated token of `s` that parses as a
/// `usize`, or `None` if there is no such token.
fn find_first_usize(s : &str) -> Option<usize> {
    // `find_map` stops at the first token that parses successfully,
    // exactly like the original explicit loop did.
    s.split_whitespace().find_map(|ws| ws.parse::<usize>().ok())
}

/// Try to read pretty printer options from `pp_options.txt`, looking in
/// the current working directory first and then in the `config`
/// subdirectory. Lines starting with `#` and unrecognized lines are
/// skipped; a `pp.indent`/`pp.width` line with no parsable number makes
/// the whole read return `None`.
pub fn try_read_pp_options() -> Option<PPOptions> {
    let mut cwd = std::env::current_dir().ok()?;
    let mut cwd_separate_cfg = cwd.clone();
    cwd.push(PathBuf::from("pp_options.txt"));
    cwd_separate_cfg.push(PathBuf::from("config/pp_options.txt"));

    let mut empty_options = PPOptions::new_default();

    // try to read in both locations; `or_else` only touches the fallback
    // file when the first read failed (clippy::or_fun_call)
    for line in read_to_string(cwd)
                .ok()
                .or_else(|| read_to_string(cwd_separate_cfg).ok())?
                .lines() {
        match line {
            s if s.starts_with('#') => (),
            s if s.contains("pp.all") => empty_options.all = find_true_else_false(s),
            s if s.contains("pp.implicit") => empty_options.implicit = find_true_else_false(s),
            s if s.contains("pp.notation") => empty_options.notation = find_true_else_false(s),
            s if s.contains("pp.proofs") => empty_options.proofs = find_true_else_false(s),
            s if s.contains("pp.locals_full_names") => empty_options.locals_full_names = find_true_else_false(s),
            s if s.contains("pp.indent") => empty_options.indent = find_first_usize(s)?,
            s if s.contains("pp.width") => empty_options.width = find_first_usize(s)?,
            _ => ()
        }
    }

    Some(empty_options)
}

/// Try to read the list of names to pretty print from `pp_names.txt`
/// (cwd first, then `config/`). Returns the successfully parsed names
/// plus the raw text of every line that failed to parse.
pub fn try_read_pp_file() -> Option<(Vec<Name>, Vec<String>)> {
    let mut cwd = std::env::current_dir().ok()?;
    let mut cwd_separate_cfg = cwd.clone();
    cwd.push(PathBuf::from("pp_names.txt"));
    cwd_separate_cfg.push(PathBuf::from("config/pp_names.txt"));

    let (mut names, mut errs) = (Vec::new(), Vec::new());

    for line in read_to_string(cwd)
                .ok()
                .or_else(|| read_to_string(cwd_separate_cfg).ok())?
                .lines() {
        match line.parse::<Name>() {
            Ok(n) => names.push(n),
            Err(_) => errs.push(String::from(line))
        }
    }

    Some((names, errs))
}

// Just prints to stdout until I figure out what I actually
// want to do with this.
152 | pub fn pp_bundle(env : &Arc>) { 153 | match try_read_pp_file() { 154 | None => (), 155 | Some((ns, _)) => { 156 | if ns.is_empty() { 157 | println!("\nNo items to pretty print\n"); 158 | } else { 159 | let pp_options = try_read_pp_options(); 160 | //let mut outputs = Vec::::with_capacity(ns.len()); 161 | println!("\nBEGIN PRETTY PRINTER OUTPUT : \n"); 162 | for n in ns.iter() { 163 | let rendered = PrettyPrinter::print_declar(pp_options.clone(), n, &env); 164 | println!("{}\n", rendered); 165 | } 166 | println!("END PRETTY PRINTER OUTPUT : \n"); 167 | } 168 | } 169 | } 170 | } 171 | 172 | 173 | impl std::str::FromStr for Name { 174 | type Err = String; 175 | fn from_str(s : &str) -> Result { 176 | let mut base = mk_anon(); 177 | 178 | if s.is_empty() { 179 | return Err(format!("Cannot pretty print the empty/anonymous Lean name!")) 180 | } 181 | 182 | let fragments = s.split_terminator('.'); 183 | 184 | for f in fragments { 185 | match f.parse::() { 186 | Ok(n) => { base = base.extend_num(n); }, 187 | _ => { 188 | if f.is_empty() { 189 | return Err(format!("Name cannot be empty!")) 190 | } else if f.starts_with('#') { 191 | return Err(format!("Commented out")) 192 | } else { 193 | base = base.extend_str(f); 194 | } 195 | } 196 | } 197 | } 198 | 199 | Ok(base) 200 | } 201 | } -------------------------------------------------------------------------------- /src/env.rs: -------------------------------------------------------------------------------- 1 | 2 | use std::sync::Arc; 3 | use hashbrown::{ HashMap, HashSet }; 4 | use parking_lot::RwLock; 5 | 6 | use crate::name::Name; 7 | use crate::level::Level; 8 | use crate::expr::{ Expr, unique_const_names, univ_params_subset, mk_const }; 9 | use crate::reduction::{ ReductionRule, ReductionMap }; 10 | use crate::quot::Quot; 11 | use crate::inductive::Inductive; 12 | use crate::tc::TypeChecker; 13 | use crate::pretty::components::Notation; 14 | 15 | use Modification::*; 16 | use CompiledModification::*; 17 | 18 | 19 | 
/// Generic wrapper used to describe items to be added to 20 | /// the environment in some capacity, including axioms, 21 | /// parts of inductive declarations, and parts of 22 | /// quotient. See the method `tc::def_height()` for a description 23 | /// of what height is. 24 | #[derive(Debug, Clone, PartialEq)] 25 | pub struct Declaration { 26 | pub name: Name, 27 | pub univ_params: Arc>, 28 | pub ty: Expr, 29 | pub height: u16, 30 | pub builtin: bool, 31 | } 32 | 33 | /// Environment containing the declarations, reduction rules, 34 | /// and notations that make up the context for a set of Lean 35 | /// items. Essentially, "the place where everything goes", and 36 | /// "the place you go to get stuff". We interact with this 37 | /// through an atomically reference counted RwLock so we can 38 | /// interact with it from different threads, but because Arc> 39 | /// dereferences to , and ParkingLot's RwLock implementation 40 | /// doesn't need to return a result, that part of it is usually 41 | /// transparent. 42 | #[derive(Clone)] 43 | pub struct Env { 44 | pub declarations: HashMap, 45 | pub reduction_map: ReductionMap, 46 | pub notations : HashMap, 47 | } 48 | 49 | /// What you see is what you get. Has a name, a vector of universe 50 | /// parameters, and its type. 51 | #[derive(Clone)] 52 | pub struct Axiom { 53 | pub name : Name, 54 | pub univ_params : Arc>, 55 | pub ty : Expr 56 | } 57 | 58 | 59 | impl Axiom { 60 | pub fn new(name : Name, univ_params : Arc>, ty : Expr) -> Self { 61 | Axiom { 62 | name, 63 | univ_params, 64 | ty 65 | } 66 | } 67 | } 68 | 69 | 70 | /// Lean definition, as you would introduce with the `def` or `definition` 71 | /// keywords. Has a name, universe parameters, a type, and a value. Lemmas 72 | /// are also considered definitions. 
Follows the pattern: 73 | #[derive(Debug, Clone)] 74 | pub struct Definition { 75 | pub name : Name, 76 | pub univ_params : Arc>, 77 | pub ty : Expr, 78 | pub val : Expr 79 | } 80 | 81 | impl Definition { 82 | pub fn new(name : Name, 83 | univ_params : Arc>, 84 | ty : Expr, 85 | val : Expr) -> Self { 86 | Definition { 87 | name, 88 | univ_params, 89 | ty, 90 | val 91 | } 92 | } 93 | 94 | } 95 | 96 | 97 | impl Declaration { 98 | pub fn mk(name: Name, 99 | univ_params: Arc>, 100 | ty: Expr, 101 | height: Option, 102 | builtin: Option) 103 | -> Self { 104 | Declaration { 105 | name, 106 | univ_params, 107 | ty, 108 | height : height.unwrap_or(0u16), 109 | builtin : builtin.unwrap_or(false) 110 | } 111 | } 112 | 113 | pub fn to_axiom(&self) -> Modification { 114 | assert!(self.univ_params.iter().all(|x| x.is_param())); 115 | Modification::AxiomMod(Axiom::new(self.name.clone(), self.univ_params.clone(), self.ty.clone())) 116 | } 117 | 118 | pub fn indep_declaration_check(&self, env : Arc>) { 119 | let mut tc = TypeChecker::new(None, env); 120 | self.declaration_check(&mut tc); 121 | } 122 | 123 | pub fn declaration_check(&self, tc : &mut TypeChecker) { 124 | assert!(univ_params_subset(&self.ty, &self.univ_params 125 | .iter() 126 | .collect::>())); 127 | assert!(!self.ty.has_vars()); 128 | assert!(!self.ty.has_locals()); 129 | tc.infer_universe_of_type(&self.ty); 130 | } 131 | 132 | 133 | } 134 | 135 | 136 | 137 | /** This is the thing we actually add to the environment and type check. 138 | They have the following strucutres : 139 | Axiom : Has one `Declaration` to add to the environment. 140 | CompiledDefinition : Has one `Declaration`, one `ReductionRule`, as well 141 | as a type (a pi expr) and a value (a lambda expr). 142 | The latter two are only type checked, not added to 143 | the environment. 144 | Quot : Has four Declarations rules (quot, quot.mk, quot.lift, quot.ind) 145 | and one reduction rule. 
146 | Inductive : Has its base type as a `Declaration`, a sequence of `Declaration` 147 | items representing its introduction rules, a `Declaration` 148 | representing its elimination rule, and a sequence of 149 | `ReductionRule`s. */ 150 | #[derive(Debug, Clone)] 151 | pub enum CompiledModification { 152 | CompiledAxiomMod (Declaration), 153 | CompiledDefinition (Declaration, ReductionRule, Expr, Expr), 154 | // Type, and Value 155 | CompiledQuotMod (Vec, ReductionRule), 156 | CompiledInductive (Declaration, Vec, Declaration, Vec), 157 | // (base_type_axiom, intro_declarations, elim_declaration(rec), reduction_rules) 158 | } 159 | 160 | 161 | /// As with the other types, we have to wrap these in a way that feels 162 | /// a little bit excessive to get the behavior we want, which is that 163 | /// functions and collections can sometimes accept any `Modification`, 164 | /// and other times discriminate between IE a `DefMod` and an `Inductive`. 165 | /// We can't use a trait to tie everything together since we need 166 | /// to have collections of Modifications, and heterogeneous collections 167 | /// built over traits of different types would mean a large performance 168 | /// hit from dynamic dispatch. 
#[derive(Clone)]
pub enum Modification {
    AxiomMod (Axiom),
    DefMod (Definition),
    QuotMod (Quot),
    IndMod (crate::inductive::ProtoInd),
}


impl Env {
    /// Build an empty environment pre-sized for `num_mods` declarations.
    pub fn new(num_mods : usize) -> Self {
        Env {
            declarations : HashMap::with_capacity(num_mods),
            reduction_map : ReductionMap::new(num_mods),
            notations : HashMap::with_capacity(500)
        }
    }

    pub fn get_declaration_height(&self, name : &Name) -> Option<u16> {
        self.declarations.get(name).map(|dec| dec.height)
    }

    pub fn insert_declaration(&mut self, d : Declaration) {
        self.declarations.insert(d.name.clone(), d);
    }

    pub fn insert_reduction_rule(&mut self, r : ReductionRule) {
        self.reduction_map.add_rule(r);
    }

    pub fn get_value(&self, n : &Name) -> Option<&Expr> {
        self.reduction_map.get_value(n)
    }

    /// Register a notation for `n`, keeping the first one seen.
    pub fn add_notation(&mut self, n : &Name, notation: Notation) {
        // entry API does the hash lookup once instead of a `get`
        // followed by an `insert` (clippy::map_entry); first-wins
        // semantics are preserved since `or_insert` only writes when
        // the key is vacant.
        self.notations.entry(n.clone()).or_insert(notation);
    }

    pub fn num_declars(&self) -> usize {
        self.declarations.len()
    }
}

impl Modification {
    /// Lower a parsed modification into the `CompiledModification` that
    /// actually gets added to and checked against the environment.
    pub fn compile(self, env : &Arc<RwLock<Env>>) -> CompiledModification {
        match self {
            AxiomMod(axiom) => {
                let derived_declaration = Declaration::mk(axiom.name,
                                                          axiom.univ_params,
                                                          axiom.ty,
                                                          None,
                                                          None);
                CompiledAxiomMod(derived_declaration)
            },
            DefMod(def) => {
                // A definition's height is one more than the largest
                // height among the constants its value mentions, or 1
                // if it mentions none.
                let height =
                    match unique_const_names(&def.val)
                          .iter()
                          .filter_map(|name| env.read().get_declaration_height(name))
                          .max() {
                        Some(h) => h + 1,
                        None => 1
                    };
                let derived_declaration =
                    Declaration::mk(def.name.clone(),
                                    def.univ_params.clone(),
                                    def.ty.clone(),
                                    Some(height),
                                    None);
                let derived_reduction_rule =
                    ReductionRule::new_rr(mk_const(def.name, def.univ_params),
                                          def.val.clone(),
                                          Vec::new());
                CompiledDefinition(derived_declaration,
                                   derived_reduction_rule,
                                   def.ty,
                                   def.val)
            },
            QuotMod(quot) => quot.compile_self(),
            IndMod(ind) => {
                let ind = Inductive::new(
                    ind.name,
                    ind.params,
                    ind.ty,
                    ind.num_params,
                    ind.intros,
                    env.clone()
                );
                // `env` already has the right type; the previous
                // `&env.clone()` cloned the Arc only to borrow the clone
                ind.compile(env)
            }
        }
    }
}




impl CompiledModification {
    // All this does is add the (as of yet unchecked) item to
    // the environment. We then have to come back and check it later.
    pub fn add_only(&self, env : &Arc<RwLock<Env>>) {
        let mut write_guard = env.write();
        match self {
            CompiledAxiomMod(declaration) => {
                write_guard.insert_declaration(declaration.clone());
            },
            CompiledDefinition(declaration, rule, ..) => {
                write_guard.insert_declaration(declaration.clone());
                write_guard.insert_reduction_rule(rule.clone());
            },
            CompiledQuotMod(declarations, rule) => {
                for d in declarations {
                    write_guard.insert_declaration(d.clone());
                }
                write_guard.insert_reduction_rule(rule.clone());
            },
            CompiledInductive(_base_type, intros, elim_declaration, reductions) => {
                for d in intros {
                    write_guard.insert_declaration(d.clone());
                }

                write_guard.insert_declaration(elim_declaration.clone());

                for r in reductions {
                    write_guard.insert_reduction_rule(r.clone())
                }
            }
        }
    }

    // Checks a given item.
308 | pub fn check_only(&self, env : &Arc>) { 309 | match self { 310 | CompiledAxiomMod(declaration) => { 311 | let mut tc = TypeChecker::new(None, env.clone()); 312 | declaration.declaration_check(&mut tc); 313 | }, 314 | CompiledDefinition(declaration, _, ty, val) => { 315 | let mut tc = TypeChecker::new(None, env.clone()); 316 | declaration.declaration_check(&mut tc); 317 | tc.check_type(val, ty); 318 | }, 319 | CompiledQuotMod(declarations, _) => { 320 | for d in declarations { 321 | d.indep_declaration_check(env.clone()); 322 | } 323 | }, 324 | CompiledInductive(base_type, intros, ..) => { 325 | for d in Some(base_type).into_iter().chain(intros.into_iter()) { 326 | d.indep_declaration_check(env.clone()); 327 | } 328 | } 329 | } 330 | } 331 | } 332 | 333 | -------------------------------------------------------------------------------- /src/errors.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | 3 | /// Most of these are errors that get thrown in the event 4 | /// that a pattern match expects something that it doesn't end up getting. 5 | /// For instance, partial conversions or failed definitional equality/ 6 | /// inference checks. Ideally we would get rid of the ones related to partial functions, 7 | /// but Rust's type system doesn't offer discrimination of enum variants 8 | /// at the type level, and my experience trying to break each enum variant 9 | /// out into its own struct suggested that the amount of extra code you would need 10 | /// just to do explicit casting between types would be a huge hit to readability 11 | /// and directness. 12 | 13 | 14 | pub fn err_get_serial(loc : u32, owise : &T) -> ! { 15 | eprintln!("expr line {}; Expr::get_serial is a partial function defined only on expresisons made with the `Local` constructor, but it was called with {:?}\n", loc, owise); 16 | std::process::exit(-1); 17 | } 18 | 19 | pub fn err_lc_binding(loc : u32, owise : &T) -> ! 
{
    // Fixed copy-pasted message: this is lc_binding, not get_serial.
    eprintln!("expr line {}; Expr::lc_binding is a partial function defined only on expressions made with the `Local` constructor, but it was called with {:?}\n", loc, owise);
    std::process::exit(-1);
}

/// Fatal: the `From<&Expr> for Binding` conversion was called on a
/// non-`Local` expression. (Message fixed: the conversion is Expr -> Binding,
/// not Level -> Binding.)
pub fn err_binding_lc<T : Debug>(loc : u32, owise : &T) -> ! {
    eprintln!("expr line {}; `From` conversion for Expr -> Binding is a partial function defined only on arguments of the form Expr::Local, but it was called with the following expression {:?}\n\n", loc, owise);
    std::process::exit(-1);
}

/// Fatal: `Expr::swap_local_binding_name` was called on a non-`Local` expression.
pub fn err_swap_local_binding_name<T : Debug>(loc : u32, owise : &T) -> ! {
    eprintln!("expr line {}; Expr::swap_local_binding_name is a partial function defined only on expressions made with the `Local` constructor, but it was called with {:?}\n", loc, owise);
    std::process::exit(-1);
}

/// Fatal: `OffsetCache` was asked for a map at an out-of-bounds offset.
pub fn err_offset_cache(loc : u32, idx : usize, len : usize) -> ! {
    eprintln!("expr line {}; OffsetCache failed to retrieve HashMap at index {}; vec length was {}\n", loc, idx, len);
    std::process::exit(-1);
}

/// Fatal: the inductive module expected a `Sort` term while normalizing pis.
pub fn err_normalize_pis<T : Debug>(loc : u32, got : &T) -> ! {
    eprintln!("expr line {}; Expected a `Sort` term in inductive mod, got {:?}\n", loc, got);
    std::process::exit(-1);
}

/// Fatal: type inference received a bound variable, which should have been
/// instantiated away before inference.
pub fn err_infer_var<T : Debug>(loc : u32, got : &T) -> ! {
    eprintln!("tc line {}; infer function got a variable term, but that should never happen. received this term : {:?}\n", loc, got);
    std::process::exit(-1);
}

/// Fatal: `infer_const` looked up a name missing from the environment.
pub fn err_infer_const<T : Debug>(loc : u32, name : &T) -> ! {
    eprintln!("tc line {}; infer_const function expected a declaration to be in the environment, but it was missing. Looked for {:?}\n", loc, name);
    std::process::exit(-1);
}

/// Fatal: `infer_universe` was passed a term whose type is not a `Sort`.
pub fn err_infer_universe<T : Debug>(loc : u32, got : &T) -> ! {
    eprintln!("tc line {}; infer_universe function expected to be passed a term of type Sort, but got something else. Got term {:?}\n", loc, got);
    std::process::exit(-1);
}

/// Fatal: `infer_apps` expected a Pi term at an application head.
pub fn err_infer_apps<T : Debug>(loc : u32, got : &T) -> ! {
    eprintln!("tc line {}; infer_apps function expected to match a Pi term, but got something else. Got term {:?}\n", loc, got);
    std::process::exit(-1);
}

/// Fatal: two expressions required to be definitionally equal were not.
pub fn err_req_def_eq<T : Debug>(loc : u32, got1 : &T, got2 : &T) -> ! {
    eprintln!("tc line {}; function require_def_eq received the following two expressions expecting them to be found definitionally equal, but they were found not to be. Got E1 : {:?}\n\nE2 : {:?}\n\n", loc, got1, got2);
    std::process::exit(-1);
}

/// Fatal: `check_type` found a declared type and an inferred type that were
/// not definitionally equal.
pub fn err_check_type<T : Debug>(loc : u32, got1 : &T, got2 : &T) -> ! {
    eprintln!("tc line {}; the function check_type expected the following two expressions to be definitionally equal, but they were not. Got E1 : {:?}\n\nE2 : {:?}\n\n", loc, got1, got2);
    std::process::exit(-1);
}

/// Fatal: a new reduction rule's left-hand side did not unfold to a `Const`.
pub fn err_rr_const<T : Debug>(loc : u32, got : &T) -> ! {
    eprintln!("rr line {}; creation of new reduction rule expected to get a Const expression, but got {:?}\n", loc, got);
    std::process::exit(-1);
}

/// Fatal: the reduction map had no major premise for `name`.
pub fn err_add_rule<T : Debug>(loc : u32, name : &T) -> ! {
    eprintln!("env line {}; in reduction module, expected to find a major premise corresponding to name {:?}, but got nothing.", loc, name);
    std::process::exit(-1)
}

/// Fatal: `Level::param_name` was called on a non-`Param` level.
pub fn err_param_name<T : Debug>(loc : u32, got : &T) -> ! {
    eprintln!("level line {}; Level::param_name() is a partial function defined only for Param variants. Got {:?}\n", loc, got);
    std::process::exit(-1)
}


/// Fatal: a worker thread in `check_parallel` panicked.
pub fn join_panic(loc : u32) -> ! {
    eprintln!("main line {}; a worker thread in the `check_parallel` function panicked! More information should be available in the console.", loc);
    std::process::exit(-1)
}


/// Fatal: the thread scope in `check_parallel` failed.
/// (Message fixed: the original was copy-pasted from `join_panic`.)
pub fn scope_err(loc : u32) -> ! {
    eprintln!("main line {}; failed to create or run the thread scope in the `check_parallel` function. More information should be available in the console.", loc);
    std::process::exit(-1)
}


/// Fatal: one of the export files named on the command line failed to parse.
pub fn export_file_parse_err<T : std::fmt::Display>(loc : u32, err : T) -> ! {
    eprintln!("cli line {}; failed to parse at least one of the specified export files. Please check that the file exists at the specified path. Error details : {}\n", loc, err);
    std::process::exit(-1)
}

/// Fatal: `binder_is_pi` was called on something other than a Pi or Lambda.
pub fn partial_is_pi<T : Debug>(loc : u32, item : T) -> ! {
    eprintln!("expr line {}; bad call to partial function `binder_is_pi`; expected Pi or Lambda, got {:?}\n", loc, item);
    std::process::exit(-1);
}

/// Non-fatal: builds the message for an unrecognized item kind in the parser.
pub fn err_parse_kind<T : Debug>(t : &T) -> String {
    format!("unrecognized match on item kind while parsing. Expected 'N' 'U', or 'E', got {:?}\n", t)
}

-------------------------------------------------------------------------------- /src/expr.rs: --------------------------------------------------------------------------------

use std::sync::Arc;
use std::cmp::max;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering::Relaxed;
use std::hash::{ Hash, Hasher };

use fxhash::hash64;
use hashbrown::{ HashMap, HashSet };

use crate::name::{ Name, mk_anon };
use crate::level::{ Level, unique_univ_params, mk_zero };
use crate::utils::{ safe_minus_one, max3 };
use crate::errors;

use InnerExpr::*;

/// Because we calculate hashes based on structure, we need
/// something to distinguish between Pi and Lambda expressions, which
/// apart from their enum discriminant, have the same structure internally.
/// We just use a prime number in a (probably futile) attempt to reduce
/// the likelihood of hash collisions since they'll often be kept in hash maps.
/// Prop itself is treated as a constant, so we also need to know its hash
/// beforehand.
24 | pub const LAMBDA_HASH : u64 = 402653189; 25 | pub const PI_HASH : u64 = 1610612741; 26 | pub const PROP_HASH : u64 = 786433; 27 | pub const PROP_CACHE : ExprCache = ExprCache { digest : PROP_HASH, 28 | var_bound : 0, 29 | has_locals : false }; 30 | 31 | /// Globally visible incrementing counter for fresh Local names. 32 | /// Lazy man's way of creating fresh names across threads. 33 | /// `Local` items need to have the property that two locals will 34 | /// have the same serial iff `B` was created by executing `clone()` 35 | /// on `A`. 36 | pub static LOCAL_SERIAL : AtomicU64 = AtomicU64::new(0); 37 | 38 | 39 | 40 | 41 | /// Denote different flavors of binders. 42 | /// Each variant corresponds to the following Lean binder notation : (click for info) 43 | ///``` pseudo 44 | ///Default |-> ( .. ) 45 | ///Implicit |-> { .. } 46 | ///StrictImplicit |-> {{ .. }} 47 | ///InstImplicit |-> [ .. ] 48 | ///``` 49 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] 50 | pub enum BinderStyle { 51 | Default, 52 | Implicit, 53 | StrictImplicit, 54 | InstImplicit, 55 | } 56 | 57 | 58 | /// Binding is used to represent the information associated with a Pi, Lambda, or Let 59 | /// expression's binding. pp_name and ty would be like the `x` and `T` respectively 60 | /// in `(λ x : T, E)`. See the doc comments for BinderStyle for information on that. 
61 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 62 | pub struct Binding { 63 | pub pp_name : Name, 64 | pub ty : Expr, 65 | pub style : BinderStyle 66 | } 67 | 68 | 69 | impl Binding { 70 | pub fn mk(name : Name, ty : Expr, style : BinderStyle) -> Self { 71 | Binding { 72 | pp_name : name, 73 | ty : ty, 74 | style 75 | } 76 | } 77 | 78 | pub fn as_local(self) -> Expr { 79 | let serial = LOCAL_SERIAL.fetch_add(1, Relaxed); 80 | let digest = hash64(&(serial, &self)); 81 | Local(ExprCache::mk(digest, 0, true), serial, self).into() 82 | } 83 | 84 | pub fn swap_ty(&self, other : Expr) -> Self { 85 | Binding::mk(self.pp_name.clone(), other, self.style) 86 | } 87 | 88 | pub fn swap_name(&self, other : Name) -> Self { 89 | Binding::mk(other, self.ty.clone(), self.style) 90 | } 91 | 92 | pub fn swap_name_and_ty(&self, other_n : Name, other_t : Expr) -> Self { 93 | Binding::mk(other_n, other_t, self.style) 94 | } 95 | 96 | } 97 | 98 | /// Arc wrapper around `InnerExpr`. See InnerExpr's docs. 99 | #[derive(Clone, PartialEq, Eq, Hash)] 100 | pub struct Expr(Arc); 101 | 102 | impl std::fmt::Debug for Expr { 103 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 104 | write!(f, "{:?}", self.as_ref()) 105 | } 106 | } 107 | 108 | 109 | 110 | /// special constructor for an Expr::Sort that corresponds to `Prop` 111 | pub fn mk_prop() -> Expr { 112 | Sort(PROP_CACHE, mk_zero()).into() // into Level from InnerLevel 113 | } 114 | 115 | 116 | /// Makes a variable expression which contains a 117 | /// [De Brujin index](https://en.wikipedia.org/wiki/De_Bruijn_index). 118 | pub fn mk_var(idx : u64) -> Expr { 119 | let digest = hash64(&(idx)); 120 | Var(ExprCache::mk(digest, idx as u16 + 1, false), idx).into() // InnerLevel -> Level 121 | } 122 | 123 | /// Makes a node in the tree, joining two expressions as application. 
124 | pub fn mk_app(lhs : Expr, rhs : Expr) -> Expr { 125 | let digest = hash64(&(lhs.get_digest(), rhs.get_digest())); 126 | let var_bound = lhs.var_bound().max(rhs.var_bound()); 127 | let has_locals = lhs.has_locals() || rhs.has_locals(); 128 | App(ExprCache::mk(digest, var_bound, has_locals), lhs, rhs).into() // InnerLevel -> Level 129 | } 130 | 131 | /// Represents a Sort/Level/Universe. You can read more about these in 132 | /// sources like Theorem Proving in Lean. 133 | pub fn mk_sort(level : Level) -> Expr { 134 | let digest = hash64(&level); 135 | Sort(ExprCache::mk(digest, 0, false), level).into() // InnerLevel -> Level 136 | } 137 | 138 | /// A constant; represents a reference to a declaration that has already 139 | /// been added to the environment. 140 | pub fn mk_const(name : impl Into, levels : impl Into>>) -> Expr { 141 | let name = name.into(); 142 | let levels = levels.into(); 143 | let digest = hash64(&(&name, &levels)); 144 | Const(ExprCache::mk(digest, 0, false), name, levels).into() 145 | } 146 | 147 | /// A lambda function. 148 | pub fn mk_lambda(domain : Binding, body: Expr) -> Expr { 149 | let digest = hash64(&(LAMBDA_HASH, &domain, body.get_digest())); 150 | let var_bound = max(domain.ty.var_bound(), 151 | safe_minus_one(body.var_bound())); 152 | let has_locals = domain.ty.has_locals() || body.has_locals(); 153 | Lambda(ExprCache::mk(digest, var_bound, has_locals), domain, body).into() // InnerLevel -> Level 154 | } 155 | 156 | /// A Pi (dependent function) type. 
157 | pub fn mk_pi(domain : Binding, body: Expr) -> Expr { 158 | let digest = hash64(&(PI_HASH, &domain, body.get_digest())); 159 | let var_bound = max(domain.ty.var_bound(), 160 | safe_minus_one(body.var_bound())); 161 | let has_locals = domain.ty.has_locals() || body.has_locals(); 162 | Pi(ExprCache::mk(digest, var_bound, has_locals), domain, body).into() // InnerLevel -> Level 163 | } 164 | 165 | /// A let binding, IE `let (x : nat) := 5 in 2 * x` 166 | pub fn mk_let(domain : Binding, val : Expr, body : Expr) -> Expr { 167 | let digest = hash64(&(&domain, val.get_digest(), body.get_digest())); 168 | let var_bound = max3(domain.ty.var_bound(), 169 | val.var_bound(), 170 | safe_minus_one(body.var_bound())); 171 | let has_locals = domain.ty.has_locals() || body.has_locals() || val.has_locals(); 172 | Let(ExprCache::mk(digest, var_bound, has_locals), domain, val, body).into() // InnerLevel -> Level 173 | } 174 | 175 | /// A `Local` represents a free variable. All `Local` terms have a unique 176 | /// identifier (here we just use a monotonically increasing counter, with each 177 | /// local's identifier being called a `serial`), and carries its type around. 178 | /// As discussed above, locals must have the property that a clone/deep copy 179 | /// is the only way to produce two local items with the same serial. All other 180 | /// methods of constructing a local must produce a term with a unique identifier. 181 | pub fn mk_local(name : impl Into, ty : Expr, style : BinderStyle) -> Expr { 182 | let binding = Binding::mk(name.into(), ty, style); 183 | let serial = LOCAL_SERIAL.fetch_add(1, Relaxed); 184 | let digest = hash64(&(serial, &binding)); 185 | 186 | Local(ExprCache::mk(digest, 0, true), 187 | serial, 188 | binding).into() // InnerLevel -> Level 189 | } 190 | 191 | 192 | impl Expr { 193 | 194 | pub fn is_local(&self) -> bool { 195 | match self.as_ref() { 196 | Local(..) 
=> true, 197 | _ => false 198 | } 199 | } 200 | 201 | pub fn get_digest(&self) -> u64 { 202 | self.as_ref().get_cache().digest 203 | } 204 | 205 | pub fn has_locals(&self) -> bool { 206 | self.as_ref().get_cache().has_locals 207 | } 208 | 209 | pub fn has_vars(&self) -> bool { 210 | self.as_ref().get_cache().var_bound > 0 211 | } 212 | 213 | pub fn var_bound(&self) -> u16 { 214 | self.as_ref().get_cache().var_bound 215 | } 216 | 217 | // !! Partial function !! 218 | pub fn lc_binding(&self) -> &Binding { 219 | match self.as_ref() { 220 | Local(.., binding) => binding, 221 | owise => errors::err_lc_binding(line!(), owise) 222 | } 223 | } 224 | 225 | // !! Partial function !! 226 | // only used once in the pretty printer. 227 | pub fn binder_is_pi(&self) -> bool { 228 | match self.as_ref() { 229 | Pi(..) => true, 230 | Lambda(..) => false, 231 | owise => errors::partial_is_pi(line!(), owise) 232 | } 233 | } 234 | 235 | /// Only used in the pretty printer. 236 | pub fn swap_local_binding_name(&self, new_name : &Name) -> Expr { 237 | match self.as_ref() { 238 | Local(.., serial, binding) => { 239 | let new_binding = Binding::mk(new_name.clone(), binding.ty.clone(), binding.style); 240 | let digest = hash64(&(serial, &binding)); 241 | 242 | Local(ExprCache::mk(digest, 0, true), 243 | *serial, 244 | new_binding).into() // InnerLevel -> Level 245 | }, 246 | owise => errors::err_swap_local_binding_name(line!(), owise), 247 | } 248 | } 249 | 250 | /// !! Partial function !! 251 | /// If the expression is a Local, returns its unique identifier/serial number. 252 | /// Else kills the program with a fatal error. 253 | pub fn get_serial(&self) -> u64 { 254 | match self.as_ref() { 255 | Local(_, serial, _) => *serial, 256 | owise => errors::err_get_serial(line!(), owise) 257 | } 258 | } 259 | 260 | /// This is the primitive joining of applying two expressions with the arrow 261 | /// constructor. 
Given some `e1` and `e2`, constructs `e1 → e2` by turning 262 | /// it into `Π (e1), e2` 263 | pub fn mk_arrow(&self, other : &Expr) -> Expr { 264 | let binding = Binding::mk(mk_anon(), self.clone(), BinderStyle::Default); 265 | mk_pi(binding, other.clone()) 266 | } 267 | 268 | 269 | /// The goal here is to traverse an expression, replacing `Local` terms with `Var` 270 | /// terms where possible, while caching terms we've already performed 271 | /// substitution on. 272 | /// It's a relatively generic traversal where we cache expressions to that we 273 | /// don't have to fully evaluate subtrees if we already know how they evaluate. 274 | /// The 'interesting' case is when we run across a Local `L` in our tree; we look 275 | /// in the collection `lcs` for a term `L'` such that `L' = L`. If there isn't one, 276 | /// just return `L`. If there IS one, we note the position/index of `L'` in `lcs`, 277 | /// create a variable whose inner index is pos(L'), and return the newly created 278 | /// variable. 279 | /// `offset` is used to mark the transition from one binder's scope into another; 280 | /// you can see that it only increments as we recurse into the body of a binder 281 | /// (Lambda, Pi, or Let term). 
282 | pub fn abstract_<'e>(&self, lcs : impl Iterator + Clone) -> Expr { 283 | if !self.has_locals() { 284 | self.clone() 285 | } else { 286 | let mut cache = OffsetCache::new(); 287 | self.abstract_core(0usize, lcs.clone(), &mut cache) 288 | } 289 | } 290 | 291 | fn abstract_core<'e>(&self, offset : usize, locals : impl Iterator + Clone, cache : &mut OffsetCache) -> Expr { 292 | if !self.has_locals() { 293 | self.clone() 294 | } else if let Local(_, serial, _) = self.as_ref() { 295 | locals.clone() 296 | .position(|lc| lc.get_serial() == *serial) 297 | .map_or_else(|| self.clone(), |position| { 298 | mk_var((position + offset) as u64) 299 | }) 300 | } else { 301 | cache.get(self, offset).cloned().unwrap_or_else(|| { 302 | let result = match self.as_ref() { 303 | App(_, lhs, rhs) => { 304 | let new_lhs = lhs.abstract_core(offset, locals.clone(), cache); 305 | let new_rhs = rhs.abstract_core(offset, locals, cache); 306 | mk_app(new_lhs, new_rhs) 307 | }, 308 | Lambda(_, dom, body) => { 309 | let new_domty = dom.ty.abstract_core(offset, locals.clone(), cache); 310 | let new_body = body.abstract_core(offset + 1, locals, cache); 311 | mk_lambda(dom.swap_ty(new_domty), new_body) 312 | } 313 | Pi(_, dom, body) => { 314 | let new_domty = dom.ty.abstract_core(offset, locals.clone(), cache); 315 | let new_body = body.abstract_core(offset + 1, locals, cache); 316 | mk_pi(dom.swap_ty(new_domty), new_body) 317 | }, 318 | Let(_, dom, val, body) => { 319 | let new_domty = dom.ty.abstract_core(offset, locals.clone(), cache); 320 | let new_val = val.abstract_core(offset, locals.clone(), cache); 321 | let new_body = body.abstract_core(offset + 1, locals, cache); 322 | mk_let(dom.swap_ty(new_domty), new_val, new_body) 323 | }, 324 | owise => unreachable!("Illegal match item in Expr::abstract_core {:?}\n", owise) 325 | }; 326 | 327 | cache.insert(self.clone(), result.clone(), offset); 328 | result 329 | }) 330 | } 331 | } 332 | 333 | /// Similar shape to abstract; we traverse an 
expression, but this time we want 334 | /// to substitute variables for other expressions, stil carrying a cache and 335 | /// using an offset to track the transition into the body of successive binders. 336 | /// The interesting case this time is when we run across a Variable; we 337 | /// make sure the index is in bounds, then use it to index into the sequence 338 | /// `es`, replacing our Variable with `es`[idx]. 339 | /// `instantiate_core` is the single most time consuming part of running 340 | /// the type checker, with some of the expression trees it has to traverse 341 | /// spanning millions of nodes, so if you're going to implement a 342 | /// type checker yourself and you want it to be fast, figure out a way 343 | /// to make these functions efficient. 344 | pub fn instantiate<'e>(&self, es : impl Iterator + Clone) -> Expr { 345 | if self.var_bound() as usize == 0 { 346 | self.clone() 347 | } else { 348 | let mut cache = OffsetCache::new(); 349 | self.instantiate_core(0usize, es.clone(), &mut cache) 350 | } 351 | } 352 | 353 | // The way 'offset' works is that it pushes the index further left 354 | // in the vec it's indexing. 
Or you can think of it as pushing `None` values 355 | // onto the left of the collection, so an offset of 3 would become : 356 | // [None, None, None, e1, e2, e3, e4, e5] 357 | // 0 1 2 3 4 5 6 7 358 | fn instantiate_core<'e>(&self, offset : usize, es : impl Iterator + Clone, cache : &mut OffsetCache) -> Self { 359 | if self.var_bound() as usize <= offset { 360 | return self.clone() 361 | } else if let Var(_, idx_) = self.as_ref() { 362 | es.clone() 363 | .nth((*idx_ as usize) - offset) 364 | .cloned() 365 | .unwrap_or_else(|| self.clone()) 366 | } else { 367 | cache.get(&self, offset).cloned().unwrap_or_else(|| { 368 | let calcd = match self.as_ref() { 369 | App(_, lhs, rhs) => { 370 | let new_lhs = lhs.instantiate_core(offset, es.clone(), cache); 371 | let new_rhs = rhs.instantiate_core(offset, es, cache); 372 | mk_app(new_lhs, new_rhs) 373 | }, 374 | | Lambda(_, dom, body) => { 375 | let new_dom_ty = dom.ty.instantiate_core(offset, es.clone(), cache); 376 | let new_body = body.instantiate_core(offset + 1, es, cache); 377 | mk_lambda(dom.swap_ty(new_dom_ty), new_body) 378 | } 379 | | Pi(_, dom, body) => { 380 | let new_dom_ty = dom.ty.instantiate_core(offset, es.clone(), cache); 381 | let new_body = body.instantiate_core(offset + 1, es, cache); 382 | mk_pi(dom.swap_ty(new_dom_ty), new_body) 383 | }, 384 | Let(_, dom, val, body) => { 385 | let new_dom_ty = dom.ty.instantiate_core(offset, es.clone(), cache); 386 | let new_val = val.instantiate_core(offset, es.clone(), cache); 387 | let new_body = body.instantiate_core(offset + 1, es, cache); 388 | mk_let(dom.swap_ty(new_dom_ty), new_val, new_body) 389 | }, 390 | owise => unreachable!("Illegal match result in Expr::instantiate_core {:?}\n", owise) 391 | }; 392 | cache.insert(self.clone(), calcd.clone(), offset); 393 | calcd 394 | }) 395 | } 396 | } 397 | /// This just performs variable substitution by going through 398 | /// the `Level` items contained in `Sort` and `Const` expressions. 
399 | /// For all levels therein, attempts to replace `Level::Param` 400 | /// items with something in the `substs` mapping, which maps 401 | /// (Level::Param |-> Level) 402 | pub fn instantiate_ps(&self, substs : &Vec<(Level, Level)>) -> Expr { 403 | if substs.iter().any(|(l, r)| l != r) { 404 | match self.as_ref() { 405 | App(_, lhs, rhs) => { 406 | let new_lhs = lhs.instantiate_ps(substs); 407 | let new_rhs = rhs.instantiate_ps(substs); 408 | mk_app(new_lhs, new_rhs) 409 | }, 410 | Lambda(_, dom, body) => { 411 | let new_domty = dom.ty.instantiate_ps(substs); 412 | let new_body = body.instantiate_ps(substs); 413 | mk_lambda(dom.swap_ty(new_domty), new_body) 414 | 415 | } 416 | Pi(_, dom, body) => { 417 | let new_domty = dom.ty.instantiate_ps(substs); 418 | let new_body = body.instantiate_ps(substs); 419 | mk_pi(dom.swap_ty(new_domty), new_body) 420 | }, 421 | 422 | Let(_, dom, val, body) => { 423 | let new_domty = dom.ty.instantiate_ps(substs); 424 | let new_val = val.instantiate_ps(substs); 425 | let new_body = body.instantiate_ps(substs); 426 | mk_let(dom.swap_ty(new_domty), new_val, new_body) 427 | 428 | }, 429 | Local(.., of) => { 430 | let new_of_ty = of.ty.instantiate_ps(substs); 431 | of.swap_ty(new_of_ty).as_local() 432 | }, 433 | Var(..) => self.clone(), 434 | Sort(_, lvl) => { 435 | let instd_level = lvl.instantiate_lvl(substs); 436 | mk_sort(instd_level) 437 | }, 438 | Const(_, name, lvls) => { 439 | let new_levels = lvls.iter() 440 | .map(|x| (x.instantiate_lvl(substs))) 441 | .collect::>(); 442 | mk_const(name.clone(), new_levels) 443 | } 444 | } 445 | } else { 446 | self.clone() 447 | } 448 | } 449 | 450 | 451 | /// Note for non-rust users, IntoIterator is idempotent over Iterators; if 452 | /// we pass this something that's already an interator, nothing happens. 453 | /// But if we pass it something that isnt YET an iterator, it will turn 454 | /// it into one for us. Given a list of expressions [X_1, X_2, ... 
X_n] and 455 | /// some expression F, iteratively apply the `App` constructor to get : 456 | ///```pseudo 457 | /// App( ... App(App(F, X_1), X_2)... X_n) 458 | /// 459 | /// App 460 | /// / \ 461 | /// App X_n... 462 | /// ... 463 | /// / \ 464 | /// App X_2... 465 | /// / \ 466 | /// F X_1... 467 | ///``` 468 | pub fn fold_apps<'q, Q>(&self, apps : Q) -> Expr 469 | where Q : IntoIterator { 470 | let mut acc = self.clone(); 471 | for next in apps { 472 | acc = mk_app(acc, next.clone()) 473 | } 474 | acc 475 | } 476 | 477 | 478 | /// From an already constructed tree, unfold all consecutive 479 | /// `App` constructors along their spine from right to left. 480 | ///```pseudo 481 | /// `App` nodes, and the bottom left expression `F`. 482 | /// 483 | /// App => (F, [X_n subtree, X_2 subtree, X_1 subtree]) 484 | /// / \ 485 | /// App X_n... 486 | /// ... 487 | /// / \ 488 | /// App X_2... 489 | /// / \ 490 | /// F X_1... 491 | ///``` 492 | pub fn unfold_apps_refs(&self) -> (&Expr, Vec<&Expr>) { 493 | let (mut _fn, mut acc) = (self, Vec::with_capacity(40)); 494 | while let App(_, f, app) = _fn.as_ref() { 495 | acc.push(app); 496 | _fn = f; 497 | } 498 | (_fn, acc) 499 | } 500 | 501 | 502 | 503 | /// Same as unfold_apps_refs, but returns owned values instead 504 | /// of references and returns the vector backwards. used a 505 | /// couple of times in inductive, and once in reduction. 
506 | pub fn unfold_apps_special(&self) -> (Expr, Vec) { 507 | let (mut _fn, mut acc) = (self, Vec::with_capacity(10)); 508 | while let App(_, f, app) = _fn.as_ref() { 509 | acc.push((app).clone()); 510 | _fn = f; 511 | } 512 | acc.reverse(); 513 | (_fn.clone(), acc) 514 | } 515 | 516 | /// Given two expressions `E` and `L`, where `L` is known to be a Local : 517 | ///```pseudo 518 | /// let E = E.abstract(L) 519 | /// return (Π (L) (E')) 520 | ///``` 521 | pub fn apply_pi(&self, domain : &Expr) -> Expr { 522 | assert!(domain.is_local()); 523 | let abstracted = self.clone().abstract_(Some(domain).into_iter()); 524 | mk_pi(Binding::from(domain), abstracted) 525 | } 526 | 527 | 528 | /// Given a list of Local expressions [L_1, L_2, ... L_n] and a 529 | /// body `E : Expr`, use your favorite method (fold_right is 530 | /// nice) and the Pi constructor to make : 531 | /// 532 | ///```pseudo 533 | /// (Π L_1, (Π L_2, ... (Π L_n, E))) 534 | ///``` 535 | /// 536 | ///```pseudo 537 | /// Π 538 | /// / \ 539 | /// L_1 Π 540 | /// / \ 541 | /// L_2 ... 542 | /// Π 543 | /// / \ 544 | /// L_n E 545 | ///``` 546 | /// same as fold_pis, but generic over iterators. 547 | pub fn fold_pis<'q, Q>(&self, doms : Q) -> Expr 548 | where Q : Iterator + DoubleEndedIterator { 549 | let mut acc = self.clone(); 550 | for next in doms.rev() { 551 | acc = acc.apply_pi(next) 552 | } 553 | 554 | acc 555 | } 556 | 557 | /// This unfolds consecutive applications of `Pi` into the "core" term, 558 | /// and a list of binders pulled from the Pi applications. This is one of the few 559 | /// places where we use an in-place mutation since it's used iteratively and we don't 560 | /// want a bunch of vector allocations for no reason. 
561 | /// An example of its application might look like : 562 | ///```pseudo 563 | /// let t = Π α, (Π β, (Π γ, E)) 564 | /// let binder_acc = [] 565 | /// ...unfold_pis(t) 566 | /// assert (t = E) && (binder_acc = [α, β, γ]) 567 | /// 568 | pub fn unfold_pis(&mut self, binder_acc : &mut Vec) { 569 | while let Pi(_, dom, body) = self.as_ref() { 570 | let local = dom.clone().as_local(); 571 | let instd = body.instantiate(Some(&local).into_iter()); 572 | binder_acc.push(local); 573 | std::mem::replace(self, instd); 574 | } 575 | } 576 | 577 | /// Given two expressions `E` and `L`, where `L` is known to be a Local, 578 | ///```pseudo 579 | /// let E' = E.abstract(L) 580 | /// return (λ L, E') 581 | ///``` 582 | pub fn apply_lambda(&self, domain : &Expr) -> Expr { 583 | assert!(domain.is_local()); 584 | let abstracted = self.clone().abstract_(Some(domain).into_iter()); 585 | mk_lambda(Binding::from(domain), abstracted) 586 | } 587 | 588 | 589 | /// Given a list of Local expressions [L_1, L_2, ... L_n] and a body `E : Expr`, 590 | /// use your favorite method (here we use a right fold) and the Lambda constructor to make : 591 | ///```pseudo 592 | /// (λ L_1, (λ L_2, ... (λ L_n, E))) 593 | /// 594 | /// λ 595 | /// / \ 596 | /// L_1 λ 597 | /// / \ 598 | /// L_2 ... 599 | /// λ 600 | /// / \ 601 | /// L_n E 602 | ///``` 603 | /// same as fold_lambdas, but generic over iterators. 
604 | pub fn fold_lambdas<'q, Q>(&self, doms : Q) -> Expr 605 | where Q : Iterator + DoubleEndedIterator { 606 | let mut acc = self.clone(); 607 | for next in doms.rev() { 608 | acc = acc.apply_lambda(next) 609 | } 610 | acc 611 | } 612 | 613 | } 614 | 615 | impl Hash for InnerExpr { 616 | fn hash(&self, state : &mut H) { 617 | self.get_digest().hash(state); 618 | } 619 | } 620 | 621 | 622 | #[derive(Clone, PartialEq, Eq)] 623 | pub enum InnerExpr { 624 | Var (ExprCache, u64), 625 | Sort (ExprCache, Level), 626 | Const (ExprCache, Name, Arc>), 627 | Local (ExprCache, u64, Binding), 628 | App (ExprCache, Expr, Expr), 629 | Lambda (ExprCache, Binding, Expr), 630 | Pi (ExprCache, Binding, Expr), 631 | Let (ExprCache, Binding, Expr, Expr) 632 | } 633 | 634 | impl InnerExpr { 635 | pub fn get_digest(&self) -> u64 { 636 | self.get_cache().digest 637 | } 638 | 639 | pub fn get_cache(&self) -> ExprCache { 640 | match self { 641 | | Var (info, ..) 642 | | Sort (info, ..) 643 | | Const (info, ..) 644 | | Local (info, ..) 645 | | App (info, ..) 646 | | Lambda (info, ..) 647 | | Pi (info, ..) 648 | | Let (info, ..) => *info 649 | } 650 | } 651 | } 652 | 653 | 654 | 655 | /// Caches an expression's hash digest, number of bound variables, and whether 656 | /// or not it contains locals. The important part of this is it's calculated 657 | /// as an expression tree is constructed, where each node's cache captures 658 | /// the information for itself and for its entire subtree, since IE the hash digest 659 | /// is the digest of its component nodes, which are in turn the comopsition of THEIR 660 | /// component nodes, etc. This is extremely important for performance reasons 661 | /// since we want to do things like use hash tables as caches. If we implemented things 662 | /// naively, we would be rehashing the entire tree every time we wanted to look a term 663 | /// up in a cache, and expression trees can get very, very large. 
Instead we tell 664 | /// all hash-keyed data structures to (more or less) pull the cache.digest value off 665 | /// and use that instead. 666 | #[derive(Copy, Clone, PartialEq, Eq, Hash)] 667 | pub struct ExprCache { 668 | digest : u64, 669 | var_bound : u16, 670 | has_locals : bool, 671 | } 672 | 673 | impl std::fmt::Debug for ExprCache { 674 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 675 | write!(f, "") 676 | } 677 | } 678 | 679 | 680 | impl ExprCache { 681 | pub fn mk(digest : u64, var_bound : u16, has_locals : bool) -> Self { 682 | ExprCache { 683 | digest, 684 | var_bound, 685 | has_locals, 686 | } 687 | } 688 | 689 | } 690 | 691 | 692 | impl std::convert::AsRef for Expr { 693 | fn as_ref(&self) -> &InnerExpr { 694 | match self { 695 | Expr(arc) => arc.as_ref() 696 | } 697 | } 698 | } 699 | 700 | impl From for Expr { 701 | fn from(x : InnerExpr) -> Expr { 702 | Expr(Arc::new(x)) 703 | } 704 | } 705 | 706 | 707 | // !! Partial function !! 708 | impl From<&Expr> for Binding { 709 | fn from(e : &Expr) -> Binding { 710 | match e.as_ref() { 711 | Local(.., binding) => binding.clone(), 712 | owise => errors::err_binding_lc(line!(), owise), 713 | } 714 | } 715 | } 716 | 717 | 718 | /// Mapping of ((Expr × int) |-> Expr) that says "(expression A at offset B) 719 | /// maps to (expression C)". There are multiple ways to do this, 720 | /// but this way of doing itturned out to be (much to my surprise, 721 | /// shout-outs to @GEbner) faster than (Expr x Int) -> Expr, probably 722 | /// due in large part because of how tuples work in Rust. 
723 | 724 | pub struct OffsetCache(Vec>); 725 | 726 | impl OffsetCache { 727 | pub fn new() -> Self { 728 | OffsetCache(Vec::with_capacity(200)) 729 | } 730 | 731 | 732 | pub fn get(&self, e : &Expr, offset : usize) -> Option<&Expr> { 733 | match self { 734 | OffsetCache(inner) => inner.get(offset)?.get(e) 735 | } 736 | } 737 | 738 | pub fn insert(&mut self, e1 : Expr, e2 : Expr, offset : usize) { 739 | let map_vec = match self { 740 | OffsetCache(x) => x 741 | }; 742 | 743 | while map_vec.len() <= offset { 744 | map_vec.push(HashMap::with_capacity(50)); 745 | } 746 | 747 | match map_vec.get_mut(offset) { 748 | Some(v) => v.insert(e1, e2), 749 | None => errors::err_offset_cache(line!(), offset, map_vec.len()), 750 | }; 751 | } 752 | 753 | } 754 | 755 | /// For some expression `E`, traverse `E`, putting the `Name` field 756 | /// of any constant into a set `S`. This is only used once, when compiling 757 | /// a `Definition`; we get all of the names out of an expression's constant terms, 758 | /// and use them to look up the height of those definitions in the environment. 759 | /// There's more information about definition height under tc::def_height(). 760 | /// This isn't defined as an associated method because it wanted more 761 | /// detailed lifetime information than could be provided by `self`. 
762 | pub fn unique_const_names<'l, 's>(n : &'l Expr) -> HashSet<&'l Name> { 763 | let mut acc = HashSet::with_capacity(80); 764 | let mut cache = HashSet::with_capacity(200); 765 | unique_const_names_core(n, &mut acc, &mut cache); 766 | acc 767 | } 768 | 769 | pub fn unique_const_names_core<'l, 's>(n : &'l Expr, 770 | s : &'s mut HashSet<&'l Name>, 771 | cache : &'s mut HashSet<&'l Expr>) { 772 | if cache.contains(n) { 773 | return 774 | } else { 775 | match n.as_ref() { 776 | App(_, lhs, rhs) => { 777 | unique_const_names_core(lhs, s, cache); 778 | unique_const_names_core(rhs, s, cache); 779 | }, 780 | | Lambda(_, dom, body) 781 | | Pi(_, dom, body) => { 782 | unique_const_names_core(&dom.ty, s, cache); 783 | unique_const_names_core(&body, s, cache); 784 | 785 | }, 786 | Let(_, dom, val, body) => { 787 | unique_const_names_core(&dom.ty, s, cache); 788 | unique_const_names_core(&val, s, cache); 789 | unique_const_names_core(&body, s, cache); 790 | }, 791 | Const(_, name, _) => { 792 | s.insert(name); 793 | }, 794 | _ => (), 795 | }; 796 | cache.insert(n); 797 | } 798 | } 799 | 800 | /// Given some expression `E` and a set of levels `S_X`, collect all 801 | /// Level::Param elements in `E` into a set `S_E`, and determine whether 802 | /// or not `S_E` is a subset of `S_X`. This only gets used once, in 803 | /// the process of checking the type field of a `Declaration`, in order 804 | /// to ensure that all of the universe parameters being used in some 805 | /// declaration's type are properly declared in it's separate 806 | /// `univ_params` field. 
807 | pub fn univ_params_subset<'l, 's>(e : &'l Expr, other : &'s HashSet<&'l Level>) -> bool { 808 | let mut const_names_in_e = HashSet::with_capacity(40); 809 | univ_params_subset_core(e, &mut const_names_in_e); 810 | 811 | const_names_in_e.is_subset(&other) 812 | } 813 | 814 | fn univ_params_subset_core<'l, 's>(e : &'l Expr, s : &'s mut HashSet<&'l Level>) { 815 | match e.as_ref() { 816 | App(_, lhs, rhs) => { 817 | univ_params_subset_core(lhs, s); 818 | univ_params_subset_core(rhs, s); 819 | }, 820 | | Lambda(_, dom, body) 821 | | Pi(_, dom, body) => { 822 | univ_params_subset_core(&dom.ty, s); 823 | univ_params_subset_core(body, s); 824 | }, 825 | Let(_, dom, val, body) => { 826 | univ_params_subset_core(&dom.ty, s); 827 | univ_params_subset_core(val, s); 828 | univ_params_subset_core(body, s); 829 | }, 830 | Sort(_, lvl) => { s.extend(unique_univ_params(lvl)); }, 831 | Const(.., lvls) => for lvl in lvls.as_ref() { 832 | s.extend(unique_univ_params(lvl)); 833 | }, 834 | _ => () 835 | } 836 | } 837 | 838 | 839 | 840 | impl std::fmt::Debug for InnerExpr { 841 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 842 | match self { 843 | Var(_, idx) => { 844 | write!(f, "Var({})", idx) 845 | }, 846 | Sort(_, lvl) => { 847 | write!(f, "Sort({:?})", lvl) 848 | }, 849 | Const(_, name, lvls) => { 850 | write!(f, "Const({:?}, {:?})", name, lvls) 851 | }, 852 | App(_, e1, e2) => { 853 | write!(f, "App({:?}, {:?})", e1, e2) 854 | }, 855 | Lambda(_, dom, body) => { 856 | write!(f, "(λ ({:?}), {:?})", dom, body) 857 | }, 858 | Pi(_, dom, body) => { 859 | write!(f, "(Π ({:?}), {:?})", dom, body) 860 | }, 861 | Let(_, dom, val, body) => { 862 | write!(f, "let {:?} := {:?} in {:?}", dom, val, body) 863 | }, 864 | Local(_, serial, of) => { 865 | let truncated = serial.to_string().chars().take(6).collect::(); 866 | write!(f, "Local(serial : {:?}, of : {:?}", truncated, of) 867 | } 868 | } 869 | } 870 | } 871 | 
-------------------------------------------------------------------------------- /src/inductive.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use hashbrown::HashSet; 4 | use parking_lot::RwLock; 5 | 6 | use crate::seq; 7 | use crate::name::Name; 8 | use crate::level::{ Level, mk_param, mk_zero }; 9 | use crate::reduction::ReductionRule; 10 | use crate::env::{ Env, Declaration, CompiledModification }; 11 | use crate::tc::TypeChecker; 12 | use crate::utils::{ Either, Either::* }; 13 | use crate::errors; 14 | use crate::expr::{ Expr, 15 | Binding, 16 | BinderStyle, 17 | InnerExpr::*, 18 | mk_const, 19 | mk_sort, 20 | mk_local, 21 | mk_app }; 22 | 23 | 24 | /// This module implements inductive types. The general flow is: 25 | /// 1. The parser collects the elements needed to call `Inductive::new(..)` 26 | /// 2. Eventually we call `compile()` on the thing from 1 27 | /// 3. `compile()` creates the `Inductive`'s related declarations, 28 | /// introduction, and elimination rules. Introduction rules have 29 | /// two instances; one is `CompiledIntro`, which has intermediate info 30 | /// used for checking the introduction, and the other is the persistent 31 | /// `Declaration` spun off from an introduction rule that actually persists 32 | /// in the environment after typechecking is done, and is a member 33 | /// of the eventual `CompiledInductive`. 34 | /// Formation of the reduction rules is done by the constructors for 35 | /// `ReductionRule`, though there is special handling for k-like reduction 36 | /// which is discussed more below. 37 | /// 38 | /// Many of the functions defined on `Inductive` are just defined to 39 | /// pull them out of the body of `compile()` to keep it from just being 40 | /// a giant list of instructions. Most of them are only called once per 41 | /// inductive and could just as easily be placed inline. 
42 | 43 | 44 | #[derive(Debug, Clone)] 45 | pub struct ProtoInd { 46 | pub name: Name, 47 | pub params: Arc>, 48 | pub ty: Expr, 49 | pub num_params: usize, 50 | pub intros: Vec<(Name, Expr)>, 51 | } 52 | 53 | #[derive(Debug, Clone)] 54 | pub struct Inductive { 55 | pub num_params: usize, 56 | pub intros: Vec<(Name, Expr)>, 57 | pub tc : Arc>, 58 | pub codomain_sort : Level, 59 | pub params_and_indices : Vec, 60 | pub use_dep_elim : bool, 61 | pub minimal_const : Expr, 62 | pub base_declaration: Declaration, 63 | } 64 | 65 | impl Inductive { 66 | pub fn new(name: Name, 67 | univ_params: Arc>, 68 | type_: Expr, 69 | num_params: usize, 70 | intros: Vec<(Name, Expr)>, 71 | env : Arc>) -> Self { 72 | 73 | let minimal_const = mk_const(name.clone(), univ_params.clone()); 74 | let base_declaration = Declaration::mk(name, univ_params, type_, None, Some(true)); 75 | 76 | let mut tc = TypeChecker::new(None, env); 77 | 78 | base_declaration.to_axiom().compile(&tc.env).add_only(&tc.env); 79 | 80 | let (codomain_expr, params_and_indices) = tc.normalize_pis(&base_declaration.ty); 81 | let codomain_sort = match codomain_expr.as_ref() { 82 | Sort(_, lvl) => lvl.clone(), 83 | owise => errors::err_normalize_pis(line!(), owise) 84 | }; 85 | 86 | let use_dep_elim = codomain_sort.maybe_nonzero(); 87 | 88 | Inductive { 89 | num_params, 90 | intros, 91 | tc : Arc::new(RwLock::new(tc)), 92 | codomain_sort, 93 | params_and_indices, 94 | use_dep_elim, 95 | minimal_const, 96 | base_declaration, 97 | } 98 | } 99 | 100 | pub fn map_tc(&self, f : impl FnOnce(&mut TypeChecker) -> T) -> T { 101 | f(&mut self.tc.write()) 102 | } 103 | 104 | pub fn get_params(&self) -> &[Expr] { 105 | &self.params_and_indices[0 .. self.num_params] 106 | } 107 | 108 | pub fn get_indices(&self) -> &[Expr] { 109 | &self.params_and_indices[self.num_params .. 
] 110 | } 111 | 112 | 113 | pub fn get_name(&self) -> &Name { 114 | &self.base_declaration.name 115 | } 116 | 117 | pub fn get_univ_params(&self) -> &Vec { 118 | &self.base_declaration.univ_params.as_ref() 119 | } 120 | 121 | pub fn get_type(&self) -> &Expr { 122 | &self.base_declaration.ty 123 | } 124 | 125 | pub fn elim_into_prop(&self, compiled_intros : &Vec) -> bool { 126 | let bool1 = self.codomain_sort.maybe_zero(); 127 | let bool2 = self.intros.len() > 1; 128 | let bool3 = compiled_intros.iter().any(|intro| { 129 | intro.intro_arguments.iter().any(|arg| { 130 | !self.map_tc(|tc| tc.is_proof(arg).0) && !intro.intro_type_args.contains(arg) 131 | }) 132 | }); 133 | 134 | bool1 && (bool2 || bool3) 135 | } 136 | 137 | pub fn elim_level(&self, compiled_intros : &Vec) -> Level { 138 | if self.elim_into_prop(&compiled_intros) { 139 | mk_zero() 140 | } else { 141 | let forbidden_names = self.get_univ_params() 142 | .iter() 143 | .map(|x| x.get_param_name()) 144 | .collect::>(); 145 | let fresh_name = Name::fresh_name("l", forbidden_names); 146 | mk_param(fresh_name) 147 | } 148 | } 149 | 150 | pub fn elim_level_params(&self, elim_level : &Level) -> Arc> { 151 | if elim_level.is_param() { 152 | let lvls = seq![Some(elim_level).into_iter(), self.get_univ_params().iter()]; 153 | Arc::new(lvls) 154 | } else { 155 | Arc::new(self.get_univ_params().clone()) 156 | } 157 | } 158 | 159 | pub fn mk_motive_app(&self, e : &Expr, indices : &[Expr], motive : &Expr) -> Expr { 160 | if self.use_dep_elim { 161 | mk_app(motive.fold_apps(indices.into_iter()), e.clone()) 162 | } else { 163 | motive.fold_apps(indices.iter()) 164 | } 165 | } 166 | 167 | pub fn compile(self, env : &Arc>) -> CompiledModification { 168 | 169 | 170 | let base_type_folded_w_params = (&self.minimal_const).fold_apps(self.get_params().into_iter()); 171 | let base_type_folded_w_params_and_indices = &self.minimal_const.fold_apps(self.params_and_indices.iter()); 172 | 173 | let compiled_intros = 174 | 
self.intros.iter().map(|(intro_name, raw_intro_type)| { 175 | CompiledIntro::new(&self, 176 | raw_intro_type, 177 | intro_name, 178 | &base_type_folded_w_params) 179 | }).collect::>(); 180 | 181 | 182 | let elim_level = self.elim_level(&compiled_intros); 183 | let elim_level_params = self.elim_level_params(&elim_level); 184 | let sort_of_elim_lvl = mk_sort(elim_level); 185 | 186 | let motive_type = if self.use_dep_elim { 187 | let lc = mk_local(Name::from("c"), 188 | base_type_folded_w_params_and_indices.clone(), 189 | BinderStyle::Default); 190 | sort_of_elim_lvl.fold_pis(self.get_indices().into_iter().chain(Some(&lc))) 191 | } else { 192 | sort_of_elim_lvl.fold_pis(self.get_indices().into_iter()) 193 | }; 194 | 195 | let motive = mk_local(Name::from("C"), motive_type, BinderStyle::Implicit); 196 | 197 | // Motive is the reason why you can't set it from the start. 198 | let intro_minors = compiled_intros.iter().map(|intro| { 199 | intro.mk_intro_minor_premise(&motive) 200 | }).collect::>(); 201 | 202 | let major_premise = 203 | mk_local(Name::from("x"), 204 | base_type_folded_w_params_and_indices.clone(), 205 | BinderStyle::Default); 206 | 207 | let elim_type_args = seq![&self.get_params(), 208 | Some(&motive), 209 | &intro_minors, 210 | &self.get_indices(), 211 | Some(&major_premise)]; 212 | 213 | let elim_type = self.mk_motive_app(&major_premise, 214 | self.get_indices(), 215 | &motive).fold_pis(elim_type_args.iter()); 216 | 217 | let elim_declaration = Declaration::mk( 218 | self.get_name().extend_str("rec"), 219 | elim_level_params.clone(), 220 | elim_type, 221 | None, 222 | Some(true) 223 | ); 224 | 225 | // The 'flag' for whether you're going to end up using a k value is : 226 | // `compiled_intros` has only one element `e`, 227 | // AND the intro_arguments of `e` are empty 228 | let detect_k = compiled_intros.len() == 1 229 | && compiled_intros.get(0) 230 | .map(|intro| intro.intro_arguments.is_empty()) 231 | .unwrap_or(false); 232 | 233 | let k_intro_rule 
= if detect_k { 234 | let k_intro_arg2 = 235 | mk_const(elim_declaration.name.clone(), 236 | elim_level_params.clone()) 237 | .fold_apps(elim_type_args.iter()); 238 | 239 | let k_intro_arg3 = intro_minors[0].clone(); 240 | 241 | let k_intro_arg4 = compiled_intros[0] 242 | .intro_type_args 243 | .iter() 244 | .zip(self.params_and_indices.iter()) 245 | .filter(|(a, b)| a != b); 246 | 247 | let rr = ReductionRule::new_nondef_rr( 248 | elim_type_args.as_slice(), 249 | k_intro_arg2, 250 | k_intro_arg3, 251 | k_intro_arg4, 252 | ); 253 | Some(rr) 254 | } else { 255 | None 256 | }; 257 | 258 | 259 | let intro_declarations = 260 | compiled_intros 261 | .iter() 262 | .map(|intro| { 263 | Declaration::mk( 264 | intro.intro_name.clone(), 265 | Arc::new(self.get_univ_params().clone()), 266 | intro.raw_type.clone(), 267 | None, 268 | Some(true) 269 | ) 270 | }).collect::>(); 271 | 272 | let reduction_rules = if let Some(k_intro) = k_intro_rule { 273 | vec![k_intro] 274 | } else { 275 | compiled_intros.iter() 276 | .enumerate() 277 | .map(|(intro_minors_idx, intro)| intro.mk_reduction_rule( 278 | intro_minors_idx, 279 | &intro_minors, 280 | &motive, 281 | &elim_declaration.name, 282 | &elim_declaration.univ_params, 283 | )).collect::>() 284 | }; 285 | 286 | for i in compiled_intros.iter() { 287 | i.check_intro(env) 288 | } 289 | 290 | // We want to be able to drop non-essential 291 | // info about the original inductive and intro rules 292 | // before we reach the function boundary and return 293 | // the `CompiledInductive` item. This is also what lets 294 | // us take `parent` by reference in CompiledIntro. 
295 | 296 | CompiledModification::CompiledInductive(self.base_declaration, 297 | intro_declarations, 298 | elim_declaration, 299 | reduction_rules) 300 | } 301 | } 302 | 303 | 304 | #[derive(Debug)] 305 | pub struct CompiledIntro<'p> { 306 | pub parent : &'p Inductive, 307 | pub intro_name : Name, 308 | pub intro_arguments : Vec, 309 | pub intro_type : Expr, 310 | pub raw_type : Expr, 311 | pub intro_arg_data : Vec, 312 | pub intro_type_args : Vec, 313 | } 314 | 315 | type ArgData = Either, Vec)>; 316 | 317 | impl<'p> CompiledIntro<'p> { 318 | pub fn new(parent : &'p Inductive, 319 | raw_intro_type : &Expr, 320 | intro_name : &Name, 321 | ind_ty_w_params : &Expr) -> Self { 322 | 323 | let instd_pi = parent.map_tc(|tc| tc.instantiate_pis(raw_intro_type, parent.get_params())); 324 | let (fn_f, arguments) = parent.map_tc(|tc| tc.normalize_pis(&instd_pi)); 325 | let (new_intro_type, intro_type_args) = fn_f.unfold_apps_special(); 326 | 327 | let all_arg_infos = arguments.iter().map(|arg| { 328 | if let Local(.., binding) = arg.as_ref() { 329 | let (fn_, binders) = parent.map_tc(|tc| tc.normalize_pis(&binding.ty)); 330 | let (rec_arg_ind_ty, rec_args) = fn_.unfold_apps_special(); 331 | 332 | match rec_arg_ind_ty.as_ref() { 333 | Const(_, name, _) if name == parent.get_name() => { 334 | assert!(rec_args.len() >= parent.num_params); 335 | let (rec_args_lhs, rec_args_rhs) = rec_args.split_at(parent.num_params); 336 | parent.map_tc(|tc| { 337 | tc.require_def_eq(&rec_arg_ind_ty.fold_apps(rec_args_lhs), 338 | ind_ty_w_params); 339 | }); 340 | Right((binders, rec_args_rhs.to_vec())) 341 | }, 342 | _ => Left(arg.clone()) 343 | } 344 | } else { 345 | Left(arg.clone()) 346 | } 347 | }).collect::>(); 348 | 349 | let compiled_intro = CompiledIntro { 350 | parent, 351 | intro_name : intro_name.clone(), 352 | intro_arguments : arguments, 353 | intro_type : new_intro_type, 354 | raw_type : raw_intro_type.clone(), 355 | intro_arg_data : all_arg_infos, 356 | intro_type_args : 
intro_type_args, 357 | }; 358 | 359 | compiled_intro 360 | 361 | } 362 | 363 | // Create a declaration's inductive hypotheses 364 | pub fn ihs(&self, motive : &Expr) -> Vec { 365 | self.intro_arguments.iter().zip(&self.intro_arg_data).filter_map(|(a, b)| { 366 | match b { 367 | Right((v1, v2)) => { 368 | let apps = a.fold_apps(v1); 369 | let motive_app = self.parent.mk_motive_app(&apps, &v2, &motive); 370 | let pis = motive_app.fold_pis(v1.iter()); 371 | Some(mk_local(Name::from("ih"), pis, BinderStyle::Default)) 372 | }, 373 | _ => None 374 | } 375 | }).collect() 376 | } 377 | 378 | pub fn mk_intro_minor_premise(&self, motive : &Expr) -> Expr { 379 | let params_and_args = seq![self.parent.get_params(), &self.intro_arguments]; 380 | let lhs_const = mk_const(self.intro_name.clone(), self.parent.get_univ_params().clone()); 381 | let lhs_app = lhs_const.fold_apps(params_and_args.iter()); 382 | let motive_app = self.parent.mk_motive_app(&lhs_app, 383 | &self.intro_type_args[self.parent.num_params..], 384 | &motive); 385 | let args_and_ihs = seq![&self.intro_arguments, self.ihs(motive)]; 386 | let pis = motive_app.fold_pis(args_and_ihs.iter()); 387 | let hypothesis_binding = Binding::mk(Name::from("h"), pis, BinderStyle::Default); 388 | hypothesis_binding.as_local() 389 | } 390 | 391 | 392 | pub fn recursive_calls(&self, 393 | motive : &Expr, 394 | minor_premises : &Vec, 395 | elim_declar_name : &Name, 396 | elim_level_params : &Vec) -> Vec { 397 | let mut results_vec = Vec::with_capacity(self.intro_arguments.len().max(self.intro_arg_data.len())); 398 | 399 | for (rec_arg, x) in self.intro_arguments.clone().into_iter().zip(self.intro_arg_data.clone()) { 400 | match x { 401 | Right((eps, rec_arg_indices)) => { 402 | let apps_rhs = seq![self.parent.get_params(), 403 | Some(motive), 404 | &minor_premises, 405 | &rec_arg_indices, 406 | Some(rec_arg.fold_apps(eps.iter()))]; 407 | let apps_lhs = mk_const(elim_declar_name.clone(), elim_level_params.clone()); 408 | let 
fold_result = apps_lhs.fold_apps(apps_rhs.iter()); 409 | results_vec.push(fold_result.fold_lambdas(eps.iter())); 410 | }, 411 | _ => continue 412 | } 413 | } 414 | 415 | results_vec 416 | } 417 | 418 | // `intro_idx` is just the position of this particular intro 419 | // rule in the `intro_minors` seq 420 | pub fn mk_reduction_rule(&self, 421 | intro_minors_idx : usize, 422 | intro_minors : &Vec, 423 | motive : &Expr, 424 | elim_declar_name : &Name, 425 | elim_level_params : &Vec) -> ReductionRule { 426 | 427 | 428 | let rr_arg1 = seq![self.parent.get_params(), 429 | Some(motive), 430 | &intro_minors, 431 | &self.parent.get_indices(), 432 | &self.intro_arguments]; 433 | let fold_initial_val = mk_const(self.intro_name.clone(), 434 | self.parent.get_univ_params().clone()); 435 | let fold_list = seq![self.parent.get_params(), &self.intro_arguments]; 436 | let tail_apps = fold_initial_val.fold_apps(fold_list.iter()); 437 | 438 | let app_rhs = seq![self.parent.get_params(), 439 | Some(motive), 440 | &intro_minors, 441 | &self.parent.get_indices(), 442 | Some(tail_apps)]; 443 | let const_2 = mk_const(elim_declar_name.clone(), elim_level_params.clone()); 444 | let rr_arg2 = const_2.fold_apps(app_rhs.iter()); 445 | 446 | let rec_calls = self.recursive_calls(motive, intro_minors, elim_declar_name, elim_level_params); 447 | 448 | let rr_arg3 = intro_minors[intro_minors_idx].fold_apps(seq![&self.intro_arguments, rec_calls].iter()); 449 | 450 | ReductionRule::new_nondef_rr(rr_arg1.as_slice(), 451 | rr_arg2, 452 | rr_arg3, 453 | None.into_iter()) 454 | } 455 | 456 | 457 | // check an introduction rule 458 | pub fn check_intro(&self, env : &Arc>) { 459 | assert!(self.intro_type_args.len() >= self.parent.num_params); 460 | let req_lhs_rhs = self.intro_type_args.iter().take(self.parent.num_params); 461 | 462 | let req_lhs = self.intro_type.fold_apps(req_lhs_rhs); 463 | let req_rhs = self.parent.minimal_const.fold_apps(self.parent.get_params().into_iter()); 464 | 
self.parent.map_tc(|tc| tc.require_def_eq(&req_lhs, &req_rhs)); 465 | 466 | // ATTN 467 | let mut tc0 = TypeChecker::new(None, env.clone()); 468 | 469 | for elem in self.intro_arg_data.iter() { 470 | match elem { 471 | Left(e) => { 472 | let infd1 = tc0.infer(e); 473 | tc0.infer_universe_of_type(&infd1); 474 | }, 475 | Right((eps, _)) => { 476 | for e in eps { 477 | let inferred = tc0.infer(e); 478 | tc0.infer_universe_of_type(&inferred); 479 | } 480 | } 481 | } 482 | } 483 | 484 | if self.parent.codomain_sort.maybe_nonzero() { 485 | for arg in self.intro_arguments.iter() { 486 | let inferred = self.parent.map_tc(|tc| tc.infer(arg)); 487 | let arg_level = self.parent.map_tc(|tc| tc.infer_universe_of_type(&inferred)); 488 | assert!(arg_level.leq(&self.parent.codomain_sort)); 489 | } 490 | } 491 | } 492 | } 493 | 494 | -------------------------------------------------------------------------------- /src/level.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use hashbrown::HashSet; 3 | 4 | use crate::name::Name; 5 | use crate::errors; 6 | 7 | use InnerLevel::*; 8 | 9 | /// `Level` and `InnerLevel` together represent Lean's Sort/Universe level terms. 10 | /// Structurally, they're just trees, with `Level` acting as a reference counted 11 | /// wrapper around `InnerLevel`. Zero and Param values are always leaves; 12 | /// Zero is just Zero, and Param represents a variable by wrapping a `Name` value 13 | /// (like when you see `Sort u` in Lean). 14 | /// Succ is just like nat's succ, with the predecessor it points to as its only child. 15 | /// `Max` is a node which always has two children, and represents the eventual 16 | /// maximum of two `Level` values, which we can't always immediately resolve due to 17 | /// the presence of variables (Params). 
18 | /// `IMax` is a `Max` node with one special behavior; any time the right hand branch 19 | /// of an `IMax` resolves to `Zero`, the whole term resolves to `Zero`. 20 | /// This behavior has to do with correctly handling `Prop`, which you can read more 21 | /// about in 'Theorem Proving in Lean' 22 | #[derive(Clone, PartialEq, Eq, Hash)] 23 | pub struct Level(Arc); 24 | 25 | #[derive(Clone, PartialEq, Eq, Hash)] 26 | pub enum InnerLevel { 27 | Zero, 28 | Succ (Level), 29 | Max (Level, Level), 30 | IMax (Level, Level), 31 | Param(Name), 32 | } 33 | 34 | pub fn mk_zero() -> Level { 35 | Level(Arc::new(InnerLevel::Zero)) 36 | } 37 | 38 | pub fn mk_max(lhs : Level, rhs : Level) -> Level { 39 | Level(Arc::new(Max(lhs, rhs))) 40 | } 41 | 42 | pub fn mk_imax(lhs : Level, rhs : Level) -> Level { 43 | Level(Arc::new(IMax(lhs, rhs))) 44 | } 45 | 46 | pub fn mk_imax_refs(lhs : &Level, rhs : &Level) -> Level { 47 | Level(Arc::new(IMax(lhs.clone(), rhs.clone()))) 48 | } 49 | 50 | pub fn mk_param(n : impl Into) -> Level { 51 | Level(Arc::new(Param(n.into()))) 52 | } 53 | 54 | pub fn mk_succ(l : Level) -> Level { 55 | Level(Arc::new(Succ(l))) 56 | } 57 | 58 | impl Level { 59 | pub fn get_param_name(&self) -> &Name { 60 | match self.as_ref() { 61 | Param(n) => n, 62 | owise => errors::err_param_name(line!(), owise) 63 | } 64 | } 65 | 66 | pub fn is_param(&self) -> bool { 67 | match self.as_ref() { 68 | Param(..) => true, 69 | _ => false 70 | } 71 | } 72 | 73 | pub fn is_any_max(&self) -> bool { 74 | match self.as_ref() { 75 | Max(..) | IMax(..) => true, 76 | _ => false 77 | } 78 | } 79 | 80 | /// A non-naive way of combining two `Level` values (naive would be just 81 | /// creating a Max). gets used in `simplify`. 
82 | pub fn combining(&self, other : &Level) -> Self { 83 | match (self.as_ref(), other.as_ref()) { 84 | (Zero, _) => other.clone(), 85 | (_, Zero) => self.clone(), 86 | (Succ(lhs), Succ(rhs)) => mk_succ(lhs.combining(rhs)), 87 | _ => mk_max(self.clone(), other.clone()) 88 | 89 | } 90 | } 91 | 92 | /// Brief simplification procedure mostly aimed at simplifying IMax terms 93 | /// (the rule about an IMax with a right hand side of Zero becoming Zero 94 | /// is enforced here). 95 | pub fn simplify(&self) -> Level { 96 | match self.as_ref() { 97 | Zero | Param(..) => self.clone(), 98 | Succ(lvl) => mk_succ(lvl.simplify()), 99 | Max(a, b) => mk_max(a.simplify(), b.simplify()), 100 | IMax(a, b) => { 101 | let b_prime = b.simplify(); 102 | match b_prime.as_ref() { 103 | Zero => mk_zero(), 104 | Succ(..) => a.simplify().combining(&b_prime), 105 | _ => mk_imax(a.simplify(), b_prime) 106 | } 107 | } 108 | } 109 | } 110 | 111 | /// Given a `Level` `L`, and a mapping of `Level::Param |-> Level` `M`, traverse 112 | /// `L` and execute : 113 | /// for each node `n` in `L` 114 | /// if `n` is a Param, and `M` contains a mapping `n |-> x`, replace `n` with `x` 115 | pub fn instantiate_lvl(&self, substs : &Vec<(Level, Level)>) -> Level { 116 | match self.as_ref() { 117 | Zero => mk_zero(), 118 | Succ(inner) => mk_succ(inner.instantiate_lvl(substs)), 119 | Max(a, b) => { 120 | let a_prime = a.instantiate_lvl(substs); 121 | let b_prime = b.instantiate_lvl(substs); 122 | mk_max(a_prime, b_prime) 123 | }, 124 | IMax(a, b) => { 125 | let a_prime = a.instantiate_lvl(substs); 126 | let b_prime = b.instantiate_lvl(substs); 127 | mk_imax(a_prime, b_prime) 128 | }, 129 | Param(..) 
=> { 130 | substs.iter() 131 | .find(|(l, _)| l == self) 132 | .map(|(_, r)| r.clone()) 133 | .unwrap_or_else(|| self.clone()) 134 | } 135 | } 136 | } 137 | 138 | 139 | 140 | /// This is used in `leq_core` to handle the case where one of the levels in question 141 | /// is an IMax whose right hand side is some paramter `P`. In light of the 142 | /// special behvior of the right hand side of an IMax term, we need to essentially 143 | /// do case analysis on our terms before we can say for sure whether lhs <= rhs. 144 | /// The cases we need to consider are : 145 | /// 1. `P` will eventually be instantiated as `Zero` 146 | /// 2. `P` will eventually be instantiated as some non-zero level. 147 | /// 148 | /// So, we create two substitutions, and check that `leq` is true for both. 149 | ///```pseudo 150 | /// let (lhs', rhs') = (lhs[Zero/P], rhs[Zero/P]) 151 | /// let (lhs'', rhs'') = (lhs[Succ(P)/P], rhs[Succ(P)/P]) 152 | /// return (lhs' ≤ rhs') && (lhs'' ≤ rhs'') 153 | ///``` 154 | pub fn ensure_imax_leq(&self, lhs : &Level, rhs : &Level, diff : i32) -> bool { 155 | assert!(self.is_param()); 156 | 157 | let zero_map = vec![(self.clone(), mk_zero())]; 158 | let nonzero_map = vec![(self.clone(), mk_succ(self.clone()))]; 159 | 160 | 161 | let closure = |subst : &Vec<(Level, Level)>, left : &Level, right : &Level| { 162 | let left_prime = left.instantiate_lvl(subst).simplify(); 163 | let right_prime = right.instantiate_lvl(subst).simplify(); 164 | left_prime.leq_core(&right_prime, diff) 165 | }; 166 | 167 | closure(&zero_map, lhs, rhs) 168 | && 169 | closure(&nonzero_map, lhs, rhs) 170 | } 171 | 172 | /// Essentially just a big analysis of different cases to determine (in the 173 | /// presence of variables and IMax's weirdness) whether the left hand side 174 | /// is less than or equal to the right hand side (using the ordering specific to 175 | /// Lean's sort terms, not the `Ord` instance Rust would use). 
176 | /// `diff` is just a way of tracking applications of `Succ(x)` as we unroll 177 | /// both sides in each recursive call. 178 | pub fn leq_core(&self, other : &Level, diff : i32) -> bool { 179 | 180 | match (self.as_ref(), other.as_ref()) { 181 | (Zero, _) if diff >= 0 => true, 182 | (_, Zero) if diff < 0 => false, 183 | (Param(a), Param(x)) => a == x && diff >= 0, 184 | (Param(..), Zero) => false, 185 | (Zero, Param(..)) => diff >= 0, 186 | 187 | (Succ(s), _) => s.leq_core(other, diff - 1), 188 | (_, Succ(s)) => self.leq_core(s, diff + 1), 189 | 190 | (Max(a, b), _) => a.leq_core(other, diff) 191 | && b.leq_core(other, diff), 192 | 193 | (Param(..), Max(x, y)) => self.leq_core(x, diff) 194 | || self.leq_core(y, diff), 195 | 196 | (Zero, Max(x, y)) => self.leq_core(x, diff) 197 | || self.leq_core(y, diff), 198 | 199 | (IMax(a, b), IMax(x, y)) if a == x 200 | && b == y => true, 201 | 202 | (IMax(.., b), _) if b.is_param() => b.ensure_imax_leq(self, other, diff), 203 | 204 | (_, IMax(.., y)) if y.is_param() => y.ensure_imax_leq(self, other, diff), 205 | 206 | (IMax(a, b), _) if b.is_any_max() => match b.as_ref() { 207 | IMax(x, y) => { 208 | let new_max = mk_max(mk_imax_refs(a, y), 209 | mk_imax_refs(x, y)); 210 | Level::leq_core(&new_max, other, diff) 211 | }, 212 | 213 | Max(x, y) => { 214 | let new_max = mk_max(mk_imax_refs(a, x), 215 | mk_imax_refs(a, y)).simplify(); 216 | Level::leq_core(&new_max, other, diff) 217 | 218 | }, 219 | _ => unreachable!(), 220 | } 221 | 222 | (_, IMax(x, y)) if y.is_any_max() => match y.as_ref() { 223 | IMax(j, k) => { 224 | let new_max = mk_max(mk_imax_refs(x, k), 225 | mk_imax_refs(j, k)); 226 | self.leq_core(&new_max, diff) 227 | }, 228 | Max(j, k) => { 229 | let new_max = mk_max(mk_imax_refs(x, j), 230 | mk_imax_refs(x, k)).simplify(); 231 | self.leq_core(&new_max, diff) 232 | }, 233 | _ => unreachable!(), 234 | } 235 | _ => unreachable!() 236 | } 237 | } 238 | 239 | /// Outward-facing function that uses `leq_core` to 
determine whether for two 240 | /// levels `L1` and `L2`, `L1 <= L2` using Lean's definition of order on 241 | /// universes, not Rust's definition of order on `Level` terms. 242 | pub fn leq(&self, other : &Level) -> bool { 243 | self.simplify().leq_core(&other.simplify(), 0) 244 | } 245 | 246 | /// Uses antisymmetry to determine whether two levels are equal (according 247 | /// to Lean's rules for sorts) 248 | ///```pseudo 249 | ///(x ≤ y ∧ y ≤ x) → x = y 250 | ///``` 251 | pub fn eq_by_antisymm(&self, other : &Level) -> bool { 252 | let l1 = self.simplify(); 253 | let l2 = other.simplify(); 254 | 255 | l1.leq_core(&l2, 0) && l2.leq_core(&l1, 0) 256 | } 257 | 258 | /// There is no level strictly less than Zero, so for any level `L`, if `L` is 259 | /// less than or equal to Zero, it must be that L is equal to Zero. 260 | ///```pseudo 261 | /// `∀ (L : Level), (L ≤ Zero) ∧ (¬ ∃ L' : Level, L' < Zero) → L = Zero` 262 | ///``` 263 | pub fn is_zero(&self) -> bool { 264 | self.leq(&mk_zero()) 265 | } 266 | 267 | /// for any level `L`, if Succ (Zero) is less than or equal to `L`, it cannot be 268 | /// that L is equal to Zero 269 | ///```pseudo 270 | /// ∀ (L : Level), S (Zero) ≤ L → L ≠ 0 271 | ///``` 272 | pub fn is_nonzero(&self) -> bool { 273 | mk_succ(mk_zero()).leq(self) 274 | } 275 | 276 | pub fn maybe_zero(&self) -> bool { 277 | !self.is_nonzero() 278 | } 279 | 280 | pub fn maybe_nonzero(&self) -> bool { 281 | !self.is_zero() 282 | } 283 | 284 | pub fn to_offset(&self) -> (usize, &Level) { 285 | let (mut succs, mut inner) = (0usize, self); 286 | 287 | while let Succ(x) = inner.as_ref() { 288 | succs += 1; 289 | inner = x; 290 | } 291 | 292 | return (succs, inner) 293 | } 294 | } 295 | 296 | 297 | pub fn unique_univ_params<'l, 's>(lvl : &'l Level) -> HashSet<&'l Level> { 298 | let mut acc = HashSet::with_capacity(40); 299 | unique_univ_params_core(lvl, &mut acc); 300 | acc 301 | } 302 | 303 | pub fn unique_univ_params_core<'l, 's>(lvl : &'l Level, acc : &'s mut 
HashSet<&'l Level>) { 304 | match lvl.as_ref() { 305 | Zero => (), 306 | Succ(lvl) => unique_univ_params_core(lvl, acc), 307 | | Max(lhs, rhs) 308 | | IMax(lhs, rhs) => { 309 | unique_univ_params_core(lhs, acc); 310 | unique_univ_params_core(rhs, acc); 311 | }, 312 | Param(..) => { 313 | acc.insert(lvl); 314 | } 315 | } 316 | } 317 | 318 | 319 | 320 | impl std::convert::AsRef for Level { 321 | fn as_ref(&self) -> &InnerLevel { 322 | match self { 323 | Level(x) => x.as_ref() 324 | } 325 | } 326 | } 327 | 328 | impl From> for Level { 329 | fn from(x : Arc) -> Level { 330 | Level(x) 331 | } 332 | } 333 | 334 | impl From for Level { 335 | fn from(x : InnerLevel) -> Level { 336 | Level(Arc::new(x)) 337 | } 338 | } 339 | 340 | impl From<&str> for Level { 341 | fn from(s : &str) -> Level { 342 | mk_param(s) 343 | } 344 | } 345 | 346 | impl std::fmt::Debug for Level { 347 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 348 | write!(f, "{:?}", self.as_ref()) 349 | } 350 | } 351 | 352 | impl std::fmt::Debug for InnerLevel { 353 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 354 | match self { 355 | Zero => write!(f, "Zero"), 356 | Succ(_) => { 357 | let outer = Level::from(self.clone()); 358 | let (succs, inner) = outer.to_offset(); 359 | let s = if inner.is_zero() { 360 | format!("Sort {}", succs) 361 | } else { 362 | format!("{} + {:?}", succs, inner) 363 | }; 364 | 365 | write!(f, "{}", s) 366 | } 367 | Max(lhs, rhs) => write!(f, "Max({:?}, {:?})", lhs, rhs), 368 | IMax(lhs, rhs) => write!(f, "IMax({:?}, {:?})", lhs, rhs), 369 | Param(n) => write!(f, "Param({:?})", n) 370 | } 371 | } 372 | } 373 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #![forbid(unsafe_code)] 2 | #![allow(unused_parens)] 3 | #![allow(non_snake_case)] 4 | 5 | use std::sync::Arc; 6 | use std::time::SystemTime; 7 | 8 | use 
crossbeam_utils::thread; 9 | 10 | use parking_lot::RwLock; 11 | 12 | use structopt::StructOpt; 13 | 14 | use crate::env::Env; 15 | use crate::parser::LineParser; 16 | use crate::utils::{ Either::*, RwQueue, ModQueue, CompiledQueue, END_MSG_CHK }; 17 | use crate::cli::{ Opt, pp_bundle }; 18 | 19 | pub mod utils; 20 | pub mod errors; 21 | pub mod name; 22 | pub mod level; 23 | pub mod expr; 24 | pub mod reduction; 25 | pub mod tc; 26 | pub mod env; 27 | pub mod quot; 28 | pub mod inductive; 29 | pub mod parser; 30 | pub mod pretty; 31 | pub mod cli; 32 | 33 | 34 | #[cfg(feature = "mimalloc")] 35 | #[global_allocator] 36 | static GLOBAL: mimallocator::Mimalloc = mimallocator::Mimalloc; 37 | 38 | // By default, make the 'modifications' hashmap large enough to accomodate 39 | // core + ~2000 items (core is about 9000 items). If the passed export file 40 | // has more modifications, the hashmap will just resize, but that's a 41 | // (relatively) costly operation. 42 | pub const EXPECTED_NUM_MODS : usize = 11_000; 43 | 44 | pub const WARN_OUTDATED_HEADER : &str = "\n\n !! This repository has been archived!!"; 45 | pub const WARN_OUTDATED : &str = 46 | "\n 47 | It is out of date and no longer updated or maintained. 48 | Current development is continuing in the form of `nanoda_lib` : 49 | https://github.com/ammkrn/nanoda_lib.git 50 | which has a sample binary in its examples directory. 
If you're SURE 51 | you want to keep using this verison, pass the command line flag 52 | `-f` or `--force`, IE `./nanoda --force export.out\n"; 53 | 54 | fn main() { 55 | 56 | let opt = Opt::from_args(); 57 | 58 | if (!opt.force) { 59 | println!("{}", WARN_OUTDATED_HEADER); 60 | println!("{}", WARN_OUTDATED); 61 | std::process::exit(-1); 62 | } 63 | 64 | 65 | if opt.debug { 66 | println!("CLI returned these arguments : {:#?}", opt); 67 | } 68 | 69 | let export_file_strings = match opt.try_read_files() { 70 | Ok(strings) => strings, 71 | Err(e) => errors::export_file_parse_err(line!(), e) 72 | }; 73 | 74 | let start_instant = SystemTime::now(); 75 | 76 | let mut num_checked = 0usize; 77 | match opt.num_threads { 78 | 0 | 1 => for s in export_file_strings { 79 | num_checked += check_serial(s, opt.print); 80 | } 81 | owise => for s in export_file_strings { 82 | num_checked += check_parallel(s, owise as usize, opt.print) 83 | } 84 | } 85 | 86 | match start_instant.elapsed() { 87 | Ok(dur) => println!("\n### Finished checking {} items in {:?}; to the best \ 88 | of our knowledge, all terms were well-typed! 
###\n", num_checked, dur), 89 | Err(e) => println!("\n### Finished checking {} items; to the best of our \ 90 | knowledge, all terms were well-typed!\n I wasn't able to time \ 91 | execution though; here was the error : {} ###", num_checked, e) 92 | } 93 | 94 | } 95 | 96 | 97 | fn check_serial(source : String, print : bool) -> usize { 98 | let env = Arc::new(RwLock::new(Env::new(EXPECTED_NUM_MODS))); 99 | let add_queue = RwQueue::with_capacity(EXPECTED_NUM_MODS); 100 | let check_queue = RwQueue::with_capacity(EXPECTED_NUM_MODS); 101 | 102 | if let Err(e) = LineParser::parse_all(source, &add_queue, &env) { 103 | errors::export_file_parse_err(line!(), e) 104 | } 105 | 106 | loop_add(&add_queue, &check_queue, &env, 1); 107 | loop_check(&check_queue, &env); 108 | 109 | if print { 110 | pp_bundle(&env); 111 | } 112 | 113 | let n = env.read().num_declars(); 114 | n 115 | } 116 | 117 | fn check_parallel(source : String, num_threads : usize, print : bool) -> usize { 118 | let env = Arc::new(RwLock::new(Env::new(EXPECTED_NUM_MODS))); 119 | let add_queue = RwQueue::with_capacity(EXPECTED_NUM_MODS); 120 | let check_queue = RwQueue::with_capacity(EXPECTED_NUM_MODS); 121 | 122 | let scope_ = thread::scope(|s| { 123 | 124 | let mut thread_holder = Vec::with_capacity(num_threads); 125 | 126 | // add and parse can be done separately/concurrently, but both MUST be done 127 | // in order. So, when parsing ends, that thread goes immediately to 128 | // the check pool instead of adding. 
129 | thread_holder.push(s.builder().stack_size(8388608).spawn(|_| { 130 | if let Err(e) = LineParser::parse_all(source, &add_queue, &env) { 131 | errors::export_file_parse_err(line!(), e) 132 | } 133 | loop_check(&check_queue, &env); 134 | }).expect("Failed to spawn scoped thread!")); 135 | 136 | 137 | thread_holder.push(s.spawn(|_s| { 138 | loop_add(&add_queue, &check_queue, &env, num_threads); 139 | loop_check(&check_queue, &env); 140 | })); 141 | 142 | // We spawn (num_threads - 2) checker threads here since 143 | // parser and adder will check when they're done. 144 | for _ in 0..(num_threads - 2) { 145 | thread_holder.push(s.spawn(|_s| { 146 | loop_check(&check_queue, &env); 147 | })); 148 | } 149 | 150 | for t in thread_holder { 151 | t.join().expect("scoped thread panicked!") 152 | } 153 | }); 154 | 155 | if scope_.is_err() { 156 | errors::scope_err(line!()) 157 | } 158 | 159 | if print { 160 | pp_bundle(&env); 161 | } 162 | 163 | let n = env.read().num_declars(); 164 | n 165 | } 166 | 167 | 168 | // Constantly poll the `add_queue` to see if there's something 169 | // to add. If the item popped off the queue is a Left(Mod), 170 | // add said mod. `None` means there aren't any items yet, but 171 | // there will be later. Right(..) means adding is finished, 172 | // and this thread is redirected to working on the `check_queue` 173 | pub fn loop_add(add_queue : &ModQueue, 174 | check_queue : &CompiledQueue, 175 | env : &Arc>, 176 | num_threads : usize) { 177 | loop { 178 | match add_queue.pop() { 179 | Some(Left(elem)) => { 180 | let compiled = elem.compile(&env); 181 | compiled.add_only(&env); 182 | check_queue.push(Left(compiled)); 183 | }, 184 | Some(Right(_)) => { 185 | for _ in 0..(num_threads * 2) { 186 | check_queue.push(END_MSG_CHK); 187 | } 188 | break 189 | }, 190 | None => continue, 191 | } 192 | } 193 | } 194 | 195 | // Same as above. 
Constantly poll for new work, with Left(Compiled) 196 | // indicating an item to be checked, `None` meaning 'try again later' 197 | // and Right(..) meaning all checking has completed. 198 | pub fn loop_check(check_queue : &CompiledQueue, 199 | env : &Arc>) { 200 | loop { 201 | match check_queue.pop() { 202 | Some(Left(elem)) => elem.check_only(&env), 203 | Some(Right(_)) => break, 204 | None => continue 205 | } 206 | } 207 | } 208 | 209 | -------------------------------------------------------------------------------- /src/name.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use hashbrown::HashSet; 4 | 5 | use InnerName::*; 6 | 7 | /// `Name` is an Arc wrapper for the `InnerName` enum, which together represent Lean's hierarchical names, where 8 | /// hierarchical just means "nested namespaces that can be accessed with a dot", like `nat.rec`. They have a very 9 | /// similar structure to an inductive `List` type, with `Anon`, the anonymous name acting as `Nil`, 10 | /// while `Str` and `Num` act like `cons`, but specialized to consing string and integer elements respectively. 11 | /// Name values always begin with `Anon`, and can contain any combination of `Str` and `Num` applications, 12 | /// IE (in pseudo-code) `Num n (Str s (Num n' (Str s' (Anon))))` would be a valid construction. 13 | #[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Hash)] 14 | pub struct Name(Arc); 15 | 16 | #[derive(Debug, Clone, PartialEq, PartialOrd, Ord, Eq, Hash)] 17 | pub enum InnerName { 18 | Anon, 19 | Str { pfx : Name, hd : String }, 20 | Num { pfx : Name, hd : u64 }, 21 | } 22 | 23 | pub fn mk_anon() -> Name { 24 | Name(Arc::new(InnerName::Anon)) 25 | } 26 | 27 | impl Name { 28 | 29 | pub fn is_anon(&self) -> bool { 30 | match self { 31 | Name(inner) => inner.as_ref() == &Anon 32 | } 33 | } 34 | 35 | 36 | /// Extend some hierarchical name with a string. 
IE `nat` => `nat.rec` 37 | pub fn extend_str(&self, hd : &str) -> Self { 38 | Name::from(Str { pfx : self.clone(), hd : String::from(hd) }) // InnerName -> Name 39 | } 40 | 41 | /// Extend some hierarchical name with an integer. IE `prod` => `prod.3` 42 | pub fn extend_num(&self, hd : u64) -> Self { 43 | Name::from(Num { pfx : self.clone(), hd : hd }) // InnerName -> Name 44 | } 45 | 46 | 47 | /// Given a suggested prefix and a set of names we want to avoid collisions with, 48 | /// extend the suggestion with an incrementing integer until we get a name that doesn't collide with 49 | /// any of the names given in `forbidden`. This implementation relies on the laziness of iterators. 50 | pub fn fresh_name(suggested : &str, forbidden : HashSet<&Name>) -> Self { 51 | let base = Name::from(suggested); 52 | if !forbidden.contains(&base) { 53 | return base 54 | } 55 | (0u64..).into_iter() 56 | .map(|n| base.extend_num(n)) 57 | .filter(|candidate| !forbidden.contains(candidate)) 58 | .next() 59 | .unwrap() 60 | 61 | } 62 | 63 | } 64 | 65 | 66 | 67 | 68 | /// Convenience function to get the `InnerName` from a `Name` 69 | impl std::convert::AsRef for Name { 70 | fn as_ref(&self) -> &InnerName { 71 | match self { 72 | Name(x) => x.as_ref() 73 | } 74 | } 75 | } 76 | 77 | /// Convenience function for converting an Arc into its newtype `Name` 78 | impl From> for Name { 79 | fn from(x : Arc) -> Name { 80 | Name(x) 81 | } 82 | } 83 | // Convenience function for converting an InnerName to a Name 84 | impl From for Name { 85 | fn from(x : InnerName) -> Name { 86 | Name(Arc::new(x)) 87 | } 88 | } 89 | 90 | /// Creates a Name value from a string slice. 91 | impl From<&str> for Name { 92 | fn from(s : &str) -> Name { 93 | mk_anon().extend_str(s) 94 | } 95 | } 96 | 97 | 98 | /// Hierarchical names should display from left to right, with a `.` separating elements, and the anonymous name 99 | /// should display as an empty string. 
100 | /// IE the formatted version of Anon ++ Str(list) ++ Str(cases_on) ++ Num(777) should display as 101 | /// `list.cases_on.777` 102 | 103 | 104 | impl std::fmt::Debug for Name { 105 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 106 | match self.as_ref() { 107 | Anon => write!(f, "Anon"), 108 | Str { pfx, hd } => write!(f, "{:?} :: {:?}", pfx, hd), 109 | Num { pfx, hd } => write!(f, "{:?} :: {:?}", pfx, hd), 110 | } 111 | } 112 | } 113 | 114 | 115 | impl std::fmt::Display for InnerName { 116 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 117 | match self { 118 | Anon => write!(f, ""), 119 | Str { pfx, hd } => match pfx.as_ref() { 120 | Anon => write!(f, "{}", hd), 121 | owise => write!(f, "{}.{}", owise, hd) 122 | }, 123 | Num { pfx, hd } => match pfx.as_ref() { 124 | Anon => write!(f, "{}", hd), 125 | owise => write!(f, "{}.{}", owise, hd) 126 | } 127 | } 128 | } 129 | } 130 | 131 | impl std::fmt::Display for Name { 132 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 133 | write!(f, "{}", self.as_ref()) 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /src/parser.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use std::str::SplitWhitespace; 3 | 4 | use crate::name::{ Name, mk_anon }; 5 | use crate::env::{ Env, Modification, Axiom, Definition }; 6 | use crate::quot::new_quot; 7 | use crate::inductive::Inductive; 8 | use crate::pretty::components::Notation; 9 | use crate::utils::{ Either::*, END_MSG_ADD, ModQueue }; 10 | use crate::errors; 11 | use crate::level::{ Level, mk_imax, mk_max, mk_succ, mk_param, mk_zero }; 12 | use crate::expr::{ Expr, Binding, BinderStyle, mk_app, mk_prop, mk_sort, 13 | mk_var, mk_let, mk_pi, mk_lambda, mk_const }; 14 | 15 | use parking_lot::RwLock; 16 | 17 | use ParseErr::*; 18 | 19 | pub type ParseResult = std::result::Result; 20 | 21 | #[derive(Debug, 
Clone)] 22 | pub enum ParseErr { 23 | Exhausted(usize, u32), 24 | ParseInt(usize, u32, std::num::ParseIntError), 25 | StringErr(usize, u32, String), 26 | } 27 | 28 | impl std::fmt::Display for ParseErr { 29 | fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result { 30 | match self { 31 | Exhausted(line, source) => write!(f, "Parse error at source line {}, source line {} : source iterator unexpectedly yielded None (was out of elements)", line, source), 32 | ParseInt(line, source, err) => write!(f, "Parse error at lean output line {}, source line {} : {}", line, source, err), 33 | StringErr(line, source, err) => write!(f, "Parse error at lean output line {}, source line {} : {}", line, source, err), 34 | } 35 | } 36 | } 37 | 38 | 39 | pub struct LineParser<'s> { 40 | pub line_num: usize, 41 | pub names : Vec, 42 | pub levels : Vec, 43 | pub exprs : Vec, 44 | pub queue_handle : &'s ModQueue, 45 | pub env_handle : &'s Arc>, 46 | pub prop : Expr 47 | } 48 | 49 | impl<'s> LineParser<'s> { 50 | pub fn new(queue_handle : &'s ModQueue, env_handle : &'s Arc>) -> LineParser<'s> { 51 | let mut parser = LineParser { 52 | line_num: 1usize, 53 | names : Vec::with_capacity(12_000), 54 | levels : Vec::with_capacity(250), 55 | exprs : Vec::with_capacity(400_000), 56 | queue_handle, 57 | env_handle, 58 | prop : mk_prop() 59 | 60 | }; 61 | 62 | parser.names.push(mk_anon()); 63 | parser.levels.push(mk_zero()); 64 | parser 65 | } 66 | 67 | pub fn ref_anon(&self) -> Name { 68 | self.names[0].clone() 69 | } 70 | 71 | pub fn ref_zero(&self) -> Level { 72 | self.levels[0].clone() 73 | } 74 | 75 | pub fn ref_prop(&self) -> Expr { 76 | self.prop.clone() 77 | } 78 | 79 | pub fn parse_all(s : String, queue_handle : &'s ModQueue, env_handle : &'s Arc>) -> ParseResult<()> { 80 | let mut parser = LineParser::new(queue_handle, env_handle); 81 | let mut as_lines = s.lines(); 82 | 83 | while let Some(line) = &mut as_lines.next() { 84 | match parser.try_next(line) { 85 | Ok(_) => (), 86 | 
Err(e) => return Err(e) 87 | } 88 | parser.line_num += 1; 89 | } 90 | 91 | parser.queue_handle.push(END_MSG_ADD); 92 | parser.queue_handle.push(END_MSG_ADD); 93 | Ok(()) 94 | } 95 | 96 | pub fn try_next(&mut self, line : &str) -> ParseResult<()> { 97 | let mut ws = line.split_whitespace(); 98 | match ws.next().ok_or(Exhausted(self.line_num, line!()))? { 99 | "#AX" => self.make_axiom(&mut ws), 100 | "#DEF" => self.make_definition(&mut ws), 101 | "#QUOT" => self.make_quotient(), 102 | "#IND" => self.make_inductive(&mut ws), 103 | s @ "#INFIX" => self.make_notation(s, line, &mut ws), 104 | s @ "#PREFIX" => self.make_notation(s, line, &mut ws), 105 | s @ "#POSTFIX" => self.make_notation(s, line, &mut ws), 106 | owise1 => { 107 | let leading_num = owise1.parse::() 108 | .map_err(|e| ParseInt(self.line_num, line!(), e))?; 109 | let mut as_chars = ws.next() 110 | .ok_or(Exhausted(self.line_num, line!()))? 111 | .chars(); 112 | assert!(as_chars.next() == Some('#')); 113 | 114 | match as_chars.next() { 115 | Some('N') => self.make_name(leading_num, as_chars.next().ok_or(Exhausted(self.line_num, line!()))?, &mut ws), 116 | Some('U') => self.make_level(leading_num, as_chars.next().ok_or(Exhausted(self.line_num, line!()))?, &mut ws), 117 | Some('E') => self.make_expr(leading_num, as_chars.next().ok_or(Exhausted(self.line_num, line!()))?, &mut ws), 118 | owise2 => return Err(StringErr(self.line_num, line!(), errors::err_parse_kind(&owise2))) 119 | } 120 | } 121 | } 122 | } 123 | 124 | 125 | fn parse_usize(&mut self, ws : &mut SplitWhitespace) -> ParseResult { 126 | ws.next() 127 | .ok_or(Exhausted(self.line_num, line!())) 128 | .and_then(|item| item.parse::().map_err(|e| ParseInt(self.line_num, line!(), e))) 129 | } 130 | 131 | fn parse_u64(&mut self, ws : &mut SplitWhitespace) -> ParseResult { 132 | ws.next() 133 | .ok_or(Exhausted(self.line_num, line!())) 134 | .and_then(|item| item.parse::().map_err(|e| ParseInt(self.line_num, line!(), e))) 135 | } 136 | 137 | 138 | fn 
parse_rest_usize(&mut self, ws : &mut SplitWhitespace) -> ParseResult> { 139 | ws.map(|elem| elem.parse::().map_err(|e| ParseInt(self.line_num, line!(), e))) 140 | .collect::>>() 141 | } 142 | 143 | fn parse_rest_string(&mut self, ws : &mut SplitWhitespace) -> String { 144 | ws.collect::() 145 | } 146 | 147 | pub fn get_levels(&mut self, ws : &mut SplitWhitespace) -> ParseResult> { 148 | ws.into_iter() 149 | .map(|elem| elem.parse::().map_err(|e| ParseInt(self.line_num, line!(), e))) 150 | .map(|res| res.map(|idx| self.levels.get(idx).map(|x| x).cloned().unwrap_or_else(|| self.ref_zero()))) 151 | .collect::>>() 152 | } 153 | 154 | pub fn get_uparams(&mut self, ws : &mut SplitWhitespace) -> ParseResult> { 155 | ws.into_iter() 156 | .map(|elem| elem.parse::().map_err(|e| ParseInt(self.line_num, line!(), e))) 157 | .map(|res| res.map(|idx| { 158 | let name = self.names.get(idx).cloned().unwrap_or_else(|| self.ref_anon()); 159 | mk_param(name) 160 | })) 161 | .collect::>>() 162 | } 163 | 164 | pub fn parse_binder_info(&mut self, ws : &mut SplitWhitespace) -> ParseResult { 165 | ws.next().map(|elem| match elem { 166 | s if s.contains("#BD") => BinderStyle::Default, 167 | s if s.contains("#BI") => BinderStyle::Implicit, 168 | s if s.contains("#BC") => BinderStyle::InstImplicit, 169 | s if s.contains("#BS") => BinderStyle::StrictImplicit, 170 | _ => unreachable!(), 171 | }).ok_or(Exhausted(self.line_num, line!())) 172 | } 173 | 174 | pub fn get_name(&mut self, ws : &mut SplitWhitespace) -> ParseResult { 175 | self.parse_usize(ws) 176 | .map(|idx| self.names.get(idx).map(|x| x).cloned().unwrap_or_else(|| self.ref_anon())) 177 | } 178 | 179 | 180 | pub fn get_level(&mut self, ws : &mut SplitWhitespace) -> ParseResult { 181 | self.parse_usize(ws) 182 | .map(|idx| self.levels.get(idx).map(|x| x).cloned().unwrap_or_else(|| self.ref_zero())) 183 | } 184 | 185 | pub fn get_expr(&mut self, ws : &mut SplitWhitespace) -> ParseResult { 186 | self.parse_usize(ws) 187 | .map(|idx| 
self.exprs.get(idx).map(|x| x).cloned().unwrap_or_else(|| self.ref_prop())) 188 | } 189 | 190 | pub fn make_name(&mut self, new_pos : usize, kind : char, ws : &mut SplitWhitespace) -> ParseResult<()> { 191 | let prefix_name = self.get_name(ws)?; 192 | let new_name = match kind { 193 | 'S' => prefix_name.extend_str(self.parse_rest_string(ws).as_str()), 194 | 'I' => self.parse_u64(ws).map(|hd| prefix_name.extend_num(hd))?, 195 | _ => unreachable!("parser line : {}", line!()) 196 | }; 197 | 198 | 199 | write_elem_strict(&mut self.names, new_name, new_pos) 200 | } 201 | 202 | 203 | pub fn make_level(&mut self, new_pos : usize, kind : char, ws : &mut SplitWhitespace) -> ParseResult<()> { 204 | 205 | let new_level = match kind { 206 | 'S' => mk_succ(self.get_level(ws)?), 207 | 'M' => mk_max(self.get_level(ws)?, self.get_level(ws)?), 208 | 'I' => mk_imax(self.get_level(ws)?, self.get_level(ws)?), 209 | 'P' => mk_param(self.get_name(ws)?), 210 | _ => unreachable!("parser line : {}", line!()) 211 | }; 212 | 213 | write_elem_strict(&mut self.levels, new_level, new_pos) 214 | } 215 | 216 | 217 | pub fn make_expr(&mut self, new_pos : usize, kind : char, ws : &mut SplitWhitespace) -> ParseResult<()> { 218 | 219 | let new_expr = match kind { 220 | 'V' => mk_var(self.parse_u64(ws)?), 221 | 'S' => mk_sort(self.get_level(ws)?), 222 | 'C' => mk_const(self.get_name(ws)?, self.get_levels(ws)?), 223 | 'A' => mk_app(self.get_expr(ws)?, self.get_expr(ws)?), 224 | 'L' => { 225 | let binder_info = self.parse_binder_info(ws)?; 226 | let binder_name = self.get_name(ws)?; 227 | let domain = self.get_expr(ws)?; 228 | let lambda = mk_lambda(Binding::mk(binder_name, domain, binder_info), self.get_expr(ws)?); 229 | lambda 230 | }, 231 | 'P' => { 232 | let binder_info = self.parse_binder_info(ws)?; 233 | let binder_name = self.get_name(ws)?; 234 | let dom = self.get_expr(ws)?; 235 | mk_pi(Binding::mk(binder_name, dom, binder_info), self.get_expr(ws)?) 
236 | }, 237 | 'Z' => { 238 | let name = self.get_name(ws)?; 239 | let ty = self.get_expr(ws)?; 240 | let val = self.get_expr(ws)?; 241 | let body = self.get_expr(ws)?; 242 | mk_let(Binding::mk(name, ty, BinderStyle::Default), val, body) 243 | }, 244 | otherwise => unreachable!("parser line : {} expectex expression cue, got {:?}", line!(), otherwise) 245 | }; 246 | 247 | write_elem_strict(&mut self.exprs, new_expr, new_pos) 248 | } 249 | 250 | 251 | pub fn make_notation(&mut self, kind : &str, line : &str, ws : &mut SplitWhitespace) -> ParseResult<()> { 252 | let name = self.get_name(ws)?; 253 | let priority = self.parse_usize(ws)?; 254 | // Elegance. 255 | let symbol = line.chars().skip_while(|x| !x.is_whitespace()) 256 | .skip(1) 257 | .skip_while(|x| !x.is_whitespace()) 258 | .skip(1) 259 | .skip_while(|x| !x.is_whitespace()) 260 | .skip(1) 261 | .collect::(); 262 | let made = match kind { 263 | "#PREFIX" => Notation::new_prefix(name.clone(), priority, symbol), 264 | "#INFIX" => Notation::new_infix(name.clone(), priority, symbol), 265 | "#POSTFIX" => Notation::new_postfix(name.clone(), priority, symbol), 266 | _ => unreachable!() 267 | }; 268 | 269 | self.env_handle.write().add_notation(&name, made); 270 | Ok(()) 271 | } 272 | 273 | pub fn make_axiom(&mut self, ws : &mut SplitWhitespace) -> ParseResult<()> { 274 | let name = self.get_name(ws)?; 275 | let ty = self.get_expr(ws)?; 276 | let uparams = self.get_uparams(ws)?; 277 | let axiom = Axiom::new(name, Arc::new(uparams), ty); 278 | Ok(self.queue_handle.push(Left(Modification::AxiomMod(axiom)))) 279 | 280 | } 281 | 282 | pub fn make_definition(&mut self, ws : &mut SplitWhitespace) -> ParseResult<()> { 283 | let name = self.get_name(ws)?; 284 | let ty = self.get_expr(ws)?; 285 | let val = self.get_expr(ws)?; 286 | let uparams = self.get_uparams(ws)?; 287 | let def = Definition::new(name, Arc::new(uparams), ty, val); 288 | Ok(self.queue_handle.push(Left(Modification::DefMod(def)))) 289 | } 290 | 291 | pub fn 
make_quotient(&mut self) -> ParseResult<()> { 292 | self.queue_handle.push(Left(new_quot())); 293 | Ok(()) 294 | } 295 | 296 | pub fn make_inductive(&mut self, ws : &mut SplitWhitespace) -> ParseResult<()> { 297 | let num_params = self.parse_usize(ws)?; 298 | let name = self.get_name(ws)?; 299 | let ty = self.get_expr(ws)?; 300 | let num_intros = self.parse_usize(ws)?; 301 | let rest_usize = self.parse_rest_usize(ws)?; 302 | let (intros, params) = rest_usize.split_at(2 * num_intros); 303 | 304 | let param_vec = params.into_iter().map(|idx| { 305 | let fetched_name = self.names.get(*idx).cloned().unwrap_or_else(|| self.ref_anon()); 306 | mk_param(fetched_name) 307 | }).collect::>(); 308 | 309 | let mut intros_buf : Vec<(Name, Expr)> = Vec::new(); 310 | 311 | for two_slice in intros.chunks(2usize) { 312 | let name = self.names.get(two_slice[0]).cloned().unwrap_or_else(|| self.ref_anon()); 313 | let ty = self.exprs.get(two_slice[1]).cloned().unwrap_or_else(|| self.ref_prop()); 314 | intros_buf.push((name, ty)); 315 | } 316 | 317 | let ind_mod = crate::inductive::ProtoInd { name, params: Arc::new(param_vec), ty, num_params, intros: intros_buf }; 318 | Ok(self.queue_handle.push(Left(Modification::IndMod(ind_mod)))) 319 | } 320 | 321 | } 322 | 323 | 324 | // FIXME add command-line flag for strict/non-strict export file parsing. 325 | // Strict assumes that well-formed export files will not have 'holes' when filling 326 | // in comopnent arrays; IE all items will be placed consecutively. 
327 | fn write_elem_strict(v : &mut Vec, new_elem : T, pos : usize) -> ParseResult<()> { 328 | assert!(v.len() == pos); 329 | match v.get_mut(pos) { 330 | Some(_) => { 331 | eprintln!("malformed export file; components should never require replacement within vectors."); 332 | std::process::exit(-1); 333 | }, 334 | None => { 335 | v.push(new_elem); 336 | } 337 | } 338 | Ok(()) 339 | } 340 | -------------------------------------------------------------------------------- /src/pretty/components.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::name::Name; 4 | use Notation::*; 5 | 6 | pub const MAX_PRIORITY : usize = 1024; 7 | #[derive(Clone, PartialEq)] 8 | pub enum Notation { 9 | // function, priority, op 10 | Prefix (Name, usize, String), 11 | Infix (Name, usize, String), 12 | Postfix (Name, usize, String), 13 | } 14 | 15 | 16 | impl Notation { 17 | pub fn new_prefix(func : Name, priority : usize, op : String) -> Self { 18 | Prefix(func, priority, op) 19 | } 20 | 21 | pub fn new_infix(func : Name, priority : usize, op : String) -> Self { 22 | Infix(func, priority, op) 23 | } 24 | 25 | pub fn new_postfix(func : Name, priority : usize, op : String) -> Self { 26 | Postfix(func, priority, op) 27 | } 28 | 29 | 30 | pub fn fn_(&self) -> &Name { 31 | match self { 32 | | Prefix ( func, .. ) 33 | | Infix ( func, .. ) 34 | | Postfix ( func, .. 
/// Pretty-printer document: an immutable tree shared via `Arc` so subtrees
/// can be reused cheaply.
#[derive(Debug, Clone)]
pub struct Doc(Arc<InnerDoc>);

impl From<&String> for Doc {
    fn from(s : &String) -> Doc {
        Text(s.clone()).into()
    }
}

impl From<String> for Doc {
    fn from(s : String) -> Doc {
        Text(s).into()
    }
}

impl From<&str> for Doc {
    fn from(s : &str) -> Doc {
        Text(String::from(s)).into()
    }
}

/// Node kinds of the (Wadler-style) pretty-printer document: concatenation,
/// indentation, literal text, a soft line break carrying its flattened
/// representation, and a group that may be rendered flat if it fits.
#[derive(Debug, Clone)]
pub enum InnerDoc {
    Concat(Doc, Doc),
    Nest(usize, Doc),
    Text(String),
    Line(String),
    Group(Doc)
}

use InnerDoc::*;

impl std::convert::AsRef<InnerDoc> for Doc {
    fn as_ref(&self) -> &InnerDoc {
        match self {
            Doc(x) => x.as_ref()
        }
    }
}

impl From<InnerDoc> for Doc {
    fn from(t : InnerDoc) -> Doc {
        Doc(Arc::new(t))
    }
}

impl From<&InnerDoc> for Doc {
    fn from(t : &InnerDoc) -> Doc {
        Doc(Arc::new(t.clone()))
    }
}

impl Doc {

    /// A soft break that flattens to a single space.
    /// (`String::from(" ")` instead of `format!(" ")` — no formatting needed.)
    pub fn line() -> Doc {
        Line(String::from(" ")).into()
    }

    /// A soft break that flattens to nothing.
    pub fn zero_width_line() -> Doc {
        Line(String::new()).into()
    }

    pub fn as_text(t : String) -> Doc {
        Text(t).into()
    }

    // (Removed a block of dead, commented-out `sep` code that lived here.)

    pub fn group(&self) -> Doc {
        Group(self.clone()).into()
    }

    pub fn nest(&self, idx : usize) -> Doc {
        Nest(idx, self.clone()).into()
    }

    /// Width this doc would occupy if rendered entirely flat.
    pub fn flat_size(&self) -> usize {
        match self.as_ref() {
            Concat(a, b) => a.flat_size() + b.flat_size(),
            Nest(_, d) => d.flat_size(),
            Text(t) => t.len(),
            Line(x) => x.len(),
            Group(a) => a.flat_size()
        }
    }

    pub fn contains_line(&self) -> bool {
        match self.as_ref() {
            Line(_) => true,
            Concat(a, b) => a.contains_line() || b.contains_line(),
            Nest(_, d) => d.contains_line(),
            Text(_) => false,
            Group(a) => a.contains_line()
        }
    }

    /// Flat width up to (and not including) the first line break, or the
    /// whole flat width if there is none.
    pub fn dist_to_first_line(&self) -> usize {
        match self.as_ref() {
            Line(_) => 0,
            Concat(a, b) => a.dist_to_line(b.dist_to_first_line()),
            Nest(_, d) => d.dist_to_first_line(),
            Text(t) => t.len(),
            Group(a) => a.dist_to_first_line()
        }
    }

    /// Like `dist_to_first_line`, but if this doc has no break of its own,
    /// the following material's distance (`after`) is added on.
    pub fn dist_to_line(&self, after : usize) -> usize {
        if self.contains_line() {
            self.dist_to_first_line()
        } else {
            self.dist_to_first_line() + after
        }
    }

    /// Render to a string, flattening groups that fit within `line_width`.
    pub fn render(self, line_width : usize) -> String {
        let mut acc = String::new();
        let mut eol = acc.len() + line_width;

        self.render_core(0, false, 0, line_width, &mut eol, &mut acc);
        acc
    }

    pub fn render_core(&self,
                       nest : usize,
                       flatmode : bool,
                       dist_to_next_line : usize,
                       line_width : usize,
                       eol : &mut usize,
                       acc : &mut String) {
        match self.as_ref() {
            Concat(a, b) => {
                // `a` must know how far away the next break after it is.
                a.render_core(nest,
                              flatmode,
                              b.dist_to_line(dist_to_next_line),
                              line_width,
                              eol,
                              acc);
                b.render_core(nest, flatmode, dist_to_next_line, line_width, eol, acc);
            },
            Nest(idx, a) => {
                a.render_core(nest + idx, flatmode, dist_to_next_line, line_width, eol, acc);
            },
            Text(t) => {
                acc.push_str(t.as_str());
            },
            Line(x) => {
                if flatmode {
                    acc.push_str(x.as_str());
                } else {
                    // FIX: plain assignment; the old code used
                    // `std::mem::replace(eol, ..)` and discarded the result,
                    // and asserted `!flatmode` inside this already-!flatmode
                    // branch.
                    acc.push('\n');
                    *eol = acc.len() + line_width;
                    for _ in 0..nest {
                        acc.push(' ');
                    }
                }
            },
            Group(a) => {
                // Flatten iff everything up to the next break still fits
                // before the current end-of-line budget.
                a.render_core(nest,
                              flatmode || acc.len() + a.flat_size() + dist_to_next_line <= *eol,
                              dist_to_next_line,
                              line_width,
                              eol,
                              acc);
            }
        }
    }

    pub fn concat(self, other : impl Into<Doc>) -> Doc {
        Concat(self, other.into()).into()
    }

    /// Concatenate with a soft break between the two parts.
    pub fn concat_line(self, other : impl Into<Doc>) -> Doc {
        let lhs : Doc = Concat(self, Doc::line()).into();
        Concat(lhs, other.into()).into()
    }

    /// Concatenate with a hard (non-breaking) space between the two parts.
    pub fn concat_plus(self, rhs : impl Into<Doc>) -> Doc {
        let lhs : Doc = Concat(self, Text(String::from(" ")).into()).into();
        Concat(lhs, rhs.into()).into()
    }

}

/// Join docs with individually-grouped soft breaks so that rendering wraps
/// word by word at the line width.
/// NOTE(review): item type reconstructed as owned `Doc`s — the old
/// `.clone()` calls on owned elements were redundant and have been dropped.
pub fn word_wrap_val(s : impl Iterator<Item = Doc>) -> Doc {
    let mut fold_source = s.enumerate()
        .map(|(idx, elem)| {
            if idx == 0 {
                elem
            } else {
                Doc::line().concat(elem).group()
            }
        });
    // pull off the initial element for the fold
    match fold_source.next() {
        None => Doc::from(""),
        Some(init) => fold_source.fold(init, |acc, next| acc.concat(next))
    }
}


/// A doc together with the binding priority of its outermost operator; used
/// to decide whether parentheses are needed when embedding this doc inside a
/// larger expression.
pub struct Parenable {
    pub priority : usize,
    pub doc : Doc
}
Doc::from("(").concat(self.doc.clone()).concat(")") 297 | 298 | } else { 299 | self.doc.clone() 300 | } 301 | } 302 | } 303 | -------------------------------------------------------------------------------- /src/pretty/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod components; 2 | pub mod pretty_printer; -------------------------------------------------------------------------------- /src/pretty/pretty_printer.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::sync::Arc; 3 | use hashbrown::HashSet; 4 | use parking_lot::RwLock; 5 | 6 | use crate::name::Name; 7 | use crate::level::{ Level, InnerLevel::* }; 8 | use crate::expr::{ Expr, InnerExpr::*, Binding, BinderStyle }; 9 | use crate::tc::TypeChecker; 10 | use crate::env::{ Declaration, Env }; 11 | use crate::pretty::components::{ word_wrap_val, Notation, Parenable, Notation::*, Doc, InnerDoc::*, MAX_PRIORITY }; 12 | 13 | // We're using a RefCell since we need the ability to 14 | // make mutable borrrows recursively, but we don't need to 15 | // go across threads. A recursive mutex would be overkill. 
16 | #[derive(Clone)] 17 | pub struct PrettyPrinter { 18 | pub pp_options : PPOptions, 19 | pub tc : RefCell, 20 | pub used_lcs : RefCell> 21 | } 22 | 23 | 24 | impl PrettyPrinter { 25 | pub fn new(options : Option, env : &Arc>) -> Self { 26 | let options = options.unwrap_or_else(|| PPOptions::new_default()); 27 | PrettyPrinter { 28 | pp_options : options, 29 | tc : RefCell::new(TypeChecker::new(Some(true), env.clone())), 30 | used_lcs : RefCell::new(HashSet::with_capacity(100)) 31 | } 32 | } 33 | 34 | pub fn lookup_notation(&self, name : &Name) -> Option { 35 | self.tc.borrow().env.read().notations.get(name).cloned() 36 | } 37 | 38 | pub fn nest(&self, doc : Doc) -> Doc { 39 | doc.group().nest(self.pp_options.indent) 40 | } 41 | 42 | 43 | pub fn pp_name(&self, n : &Name) -> Doc { 44 | Text(format!("{}", n)).into() 45 | } 46 | 47 | pub fn pp_level(&self, lvl : &Level) -> Parenable { 48 | match lvl.as_ref() { 49 | Max(a, b) => { 50 | let doc = Doc::from("max") 51 | .concat_plus(self.pp_level(a).parens(1)) 52 | .concat_line(self.pp_level(b).parens(1)); 53 | Parenable::new(0, doc) 54 | }, 55 | IMax(a, b) => { 56 | let doc = Doc::from("imax") 57 | .concat_plus(self.pp_level(a).parens(1)) 58 | .concat_line(self.pp_level(b).parens(1)); 59 | Parenable::new(0, doc) 60 | 61 | }, 62 | Param(p) => Parenable::new_max(self.pp_name(p)), 63 | _ => { 64 | let (n, inner) = lvl.to_offset(); 65 | match inner.as_ref() { 66 | Zero => Parenable::new_max(Doc::from(format!("{}", n))), 67 | _ => { 68 | let doc = self.pp_level(inner).parens(1) 69 | .concat("+") 70 | .concat(format!("{}", n)); 71 | Parenable::new(0, doc) 72 | } 73 | } 74 | } 75 | } 76 | } 77 | 78 | pub fn already_used(&self, n : &Name) -> bool { 79 | self.used_lcs.borrow().contains(n) 80 | || self.tc.borrow().env.read().declarations.get(n).is_some() 81 | } 82 | 83 | 84 | pub fn sanitize_name(&self, n : &Name) -> Name { 85 | let as_string = format!("{}", n); 86 | let filtered = as_string.chars() 87 | .filter(|c| 
c.is_alphanumeric() || *c == '_') 88 | .skip_while(|c| c.is_digit(10) || *c == '_') 89 | .collect::(); 90 | if filtered.is_empty() { 91 | Name::from("a") 92 | } else { 93 | return Name::from(filtered.as_str()) 94 | } 95 | } 96 | 97 | // This is why Lean's pretty printer is so hard to read w/ `B_ih_1_a_1_hwf` 98 | pub fn find_unused(&self, base : &Name, idx : usize) -> Name { 99 | let n = Name::from(format!("{}_{}", base, idx).as_str()); 100 | if self.already_used(&n) { 101 | self.find_unused(base, idx + 1) 102 | } else { 103 | n 104 | } 105 | } 106 | 107 | 108 | pub fn fresh_name(&self, suggestion : &Name) -> Name { 109 | let sanitized = self.sanitize_name(suggestion); 110 | let fresh = if self.already_used(&sanitized) { 111 | self.find_unused(&sanitized, 0) 112 | } else { 113 | sanitized 114 | }; 115 | 116 | self.used_lcs.borrow_mut().insert(fresh.clone()); 117 | return fresh 118 | } 119 | 120 | pub fn remove_lc(&self, target : &Name) { 121 | self.used_lcs.borrow_mut().remove(target); 122 | } 123 | 124 | pub fn pp_bare_binder(&self, binding : &Binding) -> Doc { 125 | self.pp_name(&binding.pp_name) 126 | .concat_plus(":") 127 | .concat_line(self.pp_expr(&binding.ty).parens(1).group()) 128 | } 129 | 130 | pub fn is_implicit(&self, fun : &Expr) -> bool { 131 | let inferred = self.tc.borrow_mut().infer(fun); 132 | match self.tc.borrow_mut().whnf(&inferred).as_ref() { 133 | Pi(_, dom, _) => dom.style != BinderStyle::Default, 134 | _ => false 135 | } 136 | } 137 | 138 | 139 | pub fn pp_levels(&self, lvls : &Vec) -> Doc { 140 | let as_docs = lvls.into_iter().map(|lvl| { 141 | self.pp_level(lvl).parens(0) 142 | }); 143 | Doc::from("{") 144 | .concat(word_wrap_val(as_docs)) 145 | .concat("}") 146 | .group() 147 | } 148 | 149 | pub fn telescope(&self, head : Option, binders : &[ParsedBinder]) -> Vec { 150 | let mut acc = Vec::with_capacity(binders.len() + 1); 151 | if let Some(hd) = head { 152 | acc.push(hd); 153 | } 154 | 155 | self.telescope_core(binders, &mut acc); 156 | 
acc 157 | } 158 | 159 | pub fn telescope_core(&self, binders : &[ParsedBinder], acc : &mut Vec) { 160 | let (hd, _) = match binders.split_first() { 161 | Some((hd, tl)) => (hd, tl), 162 | None => return 163 | }; 164 | 165 | let (group, rest) = if hd.style() == BinderStyle::InstImplicit { 166 | (binders.split_at(1)) 167 | } else { 168 | let closure = |b : &ParsedBinder| b.style() == hd.style() && b.ty() == hd.ty(); 169 | take_while_slice(binders, closure) 170 | }; 171 | 172 | let mapped_group = group.iter().map(|b| { 173 | match b.is_anon && !b.occurs_in_body { 174 | true => Doc::from("_"), 175 | false => self.pp_name(b.name()) 176 | } 177 | }); 178 | 179 | let bare = word_wrap_val(mapped_group) 180 | .concat_plus(":") 181 | .concat_line(self.pp_expr(hd.ty()).parens(1).group()); 182 | 183 | let match_result = match hd.style() { 184 | BinderStyle::Default => Doc::from("(").concat(bare).concat(")"), 185 | BinderStyle::Implicit => Doc::from("{").concat(bare).concat("}"), 186 | BinderStyle::StrictImplicit => Doc::from("{{").concat(bare).concat("}}"), 187 | BinderStyle::InstImplicit => Doc::from("[").concat(bare).concat("]"), 188 | }; 189 | 190 | acc.push(self.nest(match_result)); 191 | self.telescope_core(rest, acc); 192 | } 193 | 194 | 195 | pub fn pp_binders(&self, binders : &[ParsedBinder], inner : Parenable) -> Parenable { 196 | if let Some((hd, tl)) = binders.split_first() { 197 | if hd.is_imp() { 198 | let doc = self.nest(self.pp_expr(hd.ty()).parens(25)) 199 | .concat_plus("→") 200 | .concat(Doc::line()).group() 201 | .concat(self.pp_binders(tl, inner).parens(24)); 202 | Parenable::new(24, doc) 203 | } else if hd.is_forall() { 204 | let (group, rest) = take_while_slice(binders, |x| x.is_forall()); 205 | let telescoped = word_wrap_val(self.telescope(None, group).into_iter()); 206 | let doc = self.nest(Doc::from("∀").concat_plus(telescoped) 207 | .concat(",")) 208 | .concat_line(self.pp_binders(rest, inner).parens(0)); 209 | Parenable::new(0, doc) 210 | } else { 
211 | assert!(hd.is_lambda()); 212 | let (group, rest) = take_while_slice(binders, |x| x.is_lambda()); 213 | let telescoped = word_wrap_val(self.telescope(None, group).into_iter()); 214 | let doc = self.nest(Doc::from("λ").concat_plus(telescoped) 215 | .concat(",")) 216 | .concat_line(self.pp_binders(rest, inner).parens(0)); 217 | Parenable::new(0, doc) 218 | } 219 | } else { 220 | return inner 221 | } 222 | } 223 | 224 | 225 | 226 | pub fn const_name(&self, n : &Name) -> Parenable { 227 | if !self.pp_options.implicit { 228 | Parenable::new_max(self.pp_name(n)) 229 | } else { 230 | Parenable::new_max(Doc::from("@").concat(self.pp_name(n))) 231 | } 232 | } 233 | 234 | pub fn pp_app_core(&self, e : &Expr) -> Parenable { 235 | let mut apps = Vec::new(); 236 | let mut acc = e; 237 | 238 | while let App(_, lhs, rhs) = acc.as_ref() { 239 | if !self.pp_options.implicit && self.is_implicit(lhs) { 240 | acc = lhs; 241 | } else { 242 | apps.push(rhs.clone()); 243 | acc = lhs; 244 | } 245 | } 246 | 247 | match acc.as_ref() { 248 | _ if apps.is_empty() => self.pp_expr(acc), 249 | Const(_, name, _) if self.pp_options.notation => { 250 | match self.lookup_notation(name) { 251 | Some(Prefix(_, ref prio, ref op)) if apps.len() == 1 => { 252 | let z = &apps[apps.len() - 1]; 253 | let doc = Doc::from(op) 254 | .concat(Doc::zero_width_line()) 255 | .group() 256 | .concat(self.pp_expr(z).parens(*prio)); 257 | Parenable::new(prio - 1, doc) 258 | }, 259 | Some(Postfix(_, ref prio, ref op)) if apps.len() == 1 => { 260 | let z = &apps[apps.len() - 1]; 261 | let doc = Doc::from(self.pp_expr(z).parens(*prio)) 262 | .concat(Doc::zero_width_line()) 263 | .concat(op).group(); 264 | Parenable::new(prio - 1, doc) 265 | }, 266 | Some(Infix(_, ref prio, ref op)) if apps.len() == 2 => { 267 | let z = &apps[apps.len() - 1]; 268 | let s = &apps[apps.len() - 2]; 269 | let doc = self.pp_expr(z).parens(*prio) 270 | .concat(op) 271 | .concat(Doc::zero_width_line()) 272 | 
.concat(self.pp_expr(s).parens(*prio)); 273 | Parenable::new(prio - 1, self.nest(doc)) 274 | }, 275 | _ => self.print_default(acc, &apps) 276 | } 277 | }, 278 | _ => self.print_default(acc, &apps) 279 | } 280 | } 281 | 282 | pub fn print_default(&self, f : &Expr, apps : &Vec) -> Parenable { 283 | let iter = Some(self.pp_expr(f).parens(MAX_PRIORITY - 1).group()) 284 | .into_iter() 285 | .chain(apps.into_iter().rev().map(|app| { 286 | self.pp_expr(&app).parens(MAX_PRIORITY).group() 287 | })); 288 | 289 | Parenable::new(MAX_PRIORITY - 1, self.nest(word_wrap_val(iter))) 290 | } 291 | 292 | pub fn pp_sort_core(&self, level : &Level) -> Parenable { 293 | if level.is_zero() && self.pp_options.notation { 294 | Parenable::new_max(Doc::from("Prop")) 295 | } else if let Succ(x) = level.as_ref() { 296 | Parenable::new_max(Doc::from("Type").concat_plus(self.pp_level(x).parens(MAX_PRIORITY))) 297 | } else { 298 | Parenable::new_max(Doc::from("Sort").concat_plus(self.pp_level(level).parens(MAX_PRIORITY))) 299 | } 300 | } 301 | 302 | pub fn pp_const_core(&self, name : &Name, levels : &Vec) -> Parenable { 303 | if self.tc.borrow().env.read().declarations.get(name).is_some() { 304 | self.const_name(name) 305 | } else { 306 | let uparams = if levels.is_empty() { 307 | Doc::from("") 308 | .concat(self.pp_levels(levels.as_ref())) 309 | } else { 310 | Doc::from(".") 311 | .concat(self.pp_levels(levels.as_ref())) 312 | }; 313 | let doc = Doc::from("@") 314 | .concat(self.pp_name(name)) 315 | .concat(uparams); 316 | 317 | Parenable::new_max(doc) 318 | } 319 | } 320 | 321 | 322 | pub fn pp_let_core(&self, dom : &Binding, val : &Expr, body : &Expr) -> Parenable { 323 | let suggestion = dom.clone().as_local(); 324 | assert!(suggestion.is_local()); 325 | let binding = Binding::from(&suggestion); 326 | let fresh_lc_name = self.fresh_name(&binding.pp_name); 327 | let swapped_lc = suggestion.swap_local_binding_name(&fresh_lc_name); 328 | 329 | let instd = 
body.instantiate(Some(&swapped_lc).into_iter()); 330 | let doc = self.nest(Doc::from("let").concat_plus(self.pp_bare_binder(&swapped_lc.lc_binding()).group()) 331 | .concat_plus(":=") 332 | .concat_line(self.pp_expr(val).parens(0).group()) 333 | .concat("in")) 334 | .concat_line(self.pp_expr(&instd).parens(0)).group(); 335 | let result = Parenable::new(0, doc); 336 | 337 | self.remove_lc(&fresh_lc_name); 338 | result 339 | } 340 | 341 | pub fn pp_expr(&self, e : &Expr) -> Parenable { 342 | if !self.pp_options.proofs && self.tc.borrow_mut().is_proof(e).0 { 343 | return Parenable::new_max("_".into()) 344 | } 345 | 346 | match e.as_ref() { 347 | Var(_, idx) => Parenable::new_max(format!("#{}", idx).into()), 348 | Sort(_, level) => self.pp_sort_core(level), 349 | Const(_, name, levels) => self.pp_const_core(name, levels.as_ref()), 350 | Local(.., of) => Parenable::new_max(self.pp_name(&of.pp_name)), 351 | | Lambda(..) 352 | | Pi(..) => { 353 | let (binders, instd) = self.parse_binders(e); 354 | let new_inner = self.pp_expr(&instd); 355 | let new_result = self.pp_binders(binders.as_slice(), new_inner); 356 | self.restore_lc_names(&binders); 357 | new_result 358 | } 359 | Let(_, dom, val, body) => self.pp_let_core(dom, val, body), 360 | App(..) 
=> self.pp_app_core(e) 361 | } 362 | 363 | } 364 | 365 | pub fn restore_lc_names(&self, binders : &Vec) { 366 | for elem in binders.into_iter().rev() { 367 | self.used_lcs.borrow_mut().remove(&elem.lc.lc_binding().pp_name); 368 | } 369 | } 370 | 371 | pub fn get_ups(&self, declar : &Declaration) -> Doc { 372 | match declar.univ_params.as_ref() { 373 | v if v.is_empty() => Doc::from(""), 374 | v => Doc::from(" ").concat(self.pp_levels(v)) 375 | } 376 | } 377 | 378 | 379 | pub fn main_def(&self, declar : &Declaration, val : Expr) -> Doc { 380 | let (binders, ty) = self.parse_binders(&declar.ty); 381 | 382 | // inlined parse_params 383 | let mut slice_split_idx = 0usize; 384 | let mut val_acc = &val; 385 | // break loop when at least one of these three conditions is true : 386 | // 1. binders is exhausted 387 | // 2. val is no longer a Lambda 388 | // 3. is_forall(popped element) == false 389 | for elem in binders.iter() { 390 | match val_acc.as_ref() { 391 | Lambda(.., inner_val) if elem.is_forall() => { 392 | slice_split_idx += 1; 393 | val_acc = inner_val; 394 | }, 395 | _ => break 396 | } 397 | } 398 | let (params_slice, binders_slice) = binders.split_at(slice_split_idx); 399 | let instd = val_acc.instantiate(params_slice.into_iter().rev().map(|x| &x.lc)); 400 | // end inlined 401 | 402 | let is_prop = self.tc.borrow_mut().is_proposition(&declar.ty); 403 | let cmd = match is_prop { 404 | true => "lemma", 405 | false => "def" 406 | }; 407 | 408 | let pp_val = match is_prop && !self.pp_options.proofs { 409 | true => "_".into(), 410 | false => self.pp_expr(&instd).parens(0).group() 411 | }; 412 | 413 | 414 | let new_telescoped = self.telescope(Some(self.pp_name(&declar.name)), params_slice); 415 | 416 | let sub_doc_new = self.nest(word_wrap_val(new_telescoped.into_iter())) 417 | .concat_plus(":") 418 | .concat_line(self.pp_binders(binders_slice, self.pp_expr(&ty)).parens(0).group()) 419 | .concat_plus(":="); 420 | 421 | 422 | let result = 
Doc::from(cmd).concat(self.get_ups(declar)) 423 | .concat_plus(self.nest(sub_doc_new)) 424 | .concat_line(pp_val) 425 | .concat(Doc::line()); 426 | 427 | self.restore_lc_names(&binders); 428 | result 429 | } 430 | 431 | 432 | pub fn main_axiom(&self, declar : &Declaration) -> Doc { 433 | let (binders, instd) = self.parse_binders(&declar.ty); 434 | let doc = { 435 | let (prms, rst) = take_while_slice(binders.as_slice(), |x| x.is_forall()); 436 | let telescoped = self.telescope(Some(self.pp_name(&declar.name)), prms); 437 | let sub_doc_new = self.nest(word_wrap_val(telescoped.into_iter()) 438 | .concat_plus(":") 439 | .concat_line( 440 | self.pp_binders( 441 | rst, self.pp_expr(&instd)).parens(0).group())); 442 | Doc::from("axiom").concat(self.get_ups(declar)) 443 | .concat_plus(sub_doc_new) 444 | .concat(Doc::line()) 445 | }; 446 | self.restore_lc_names(&binders); 447 | match declar.builtin { 448 | true => Doc::from("/- builtin -/").concat_plus(doc), 449 | false => doc 450 | } 451 | } 452 | 453 | pub fn pp_main(&self, declar : &Declaration) -> Doc { 454 | 455 | let env_result = self.tc.borrow() 456 | .env 457 | .read() 458 | .get_value(&declar.name) 459 | .cloned(); 460 | match env_result { 461 | // definition/lemma branch 462 | Some(val) => self.main_def(declar, val.clone()), 463 | // axiom branch 464 | None => self.main_axiom(declar) 465 | } 466 | 467 | } 468 | 469 | pub fn render_expr(&self, e : &Expr) -> String { 470 | self.pp_expr(e).doc.group().render(80) 471 | } 472 | 473 | 474 | pub fn print_declar(options : Option, n : &Name, env : &Arc>) -> String { 475 | let declar = match env.read().declarations.get(n) { 476 | Some(d) => d.clone(), 477 | None => return String::new() 478 | }; 479 | 480 | let pp = PrettyPrinter::new(options, env); 481 | 482 | pp.pp_main(&declar) 483 | .group() 484 | .render(pp.pp_options.width) 485 | } 486 | 487 | pub fn parse_binders(&self, e : &Expr) -> (Vec, Expr) { 488 | let mut acc = e; 489 | let mut ctx = Vec::::new(); 490 | 491 | 
while let | Pi(_, dom, body) 492 | | Lambda(_, dom, body) = acc.as_ref() { 493 | let new_name = self.fresh_name(&dom.pp_name); 494 | let new_ty = dom.ty.instantiate(ctx.iter().rev().map(|x| &x.lc)); 495 | let new_dom = Binding::mk(new_name, new_ty, dom.style); 496 | let new_local = new_dom.as_local(); 497 | let new_parsed_binder = ParsedBinder::new(acc.binder_is_pi(), 498 | has_var(body, 0), 499 | dom.pp_name.is_anon(), 500 | new_local); 501 | ctx.push(new_parsed_binder); 502 | acc = body; 503 | } 504 | 505 | let instd = acc.instantiate(ctx.iter().rev().map(|x| &x.lc)); 506 | (ctx, instd) 507 | } 508 | 509 | } 510 | 511 | #[derive(Debug, Clone, PartialEq)] 512 | pub struct ParsedBinder { 513 | pub is_pi : bool, 514 | pub occurs_in_body : bool, 515 | pub is_anon : bool, 516 | pub lc : Expr, 517 | } 518 | 519 | impl ParsedBinder { 520 | pub fn new(is_pi : bool, 521 | occurs_in_body : bool, 522 | is_anon : bool, 523 | lc : Expr) -> Self { 524 | 525 | ParsedBinder { 526 | is_pi, 527 | occurs_in_body, 528 | is_anon, 529 | lc, 530 | } 531 | } 532 | 533 | pub fn is_imp(&self) -> bool { 534 | self.is_pi 535 | && self.lc.lc_binding().style == BinderStyle::Default 536 | && self.is_anon 537 | && !self.occurs_in_body 538 | } 539 | 540 | pub fn is_forall(&self) -> bool { 541 | self.is_pi && !self.is_imp() 542 | } 543 | 544 | pub fn is_lambda(&self) -> bool { 545 | !self.is_pi 546 | } 547 | 548 | pub fn style(&self) -> BinderStyle { 549 | self.lc.lc_binding().style 550 | } 551 | 552 | pub fn ty(&self) -> &Expr { 553 | &self.lc.lc_binding().ty 554 | } 555 | 556 | pub fn name(&self) -> &Name { 557 | &self.lc.lc_binding().pp_name 558 | } 559 | } 560 | 561 | 562 | pub fn has_var(e : &Expr, i : u64) -> bool { 563 | if e.var_bound() as u64 <= i { 564 | return false 565 | } 566 | match e.as_ref() { 567 | Var(_, idx) => *idx == i, 568 | App(_, a, b) => has_var(a, i) || has_var(b, i), 569 | Lambda(_, dom, body) => has_var(&dom.ty, i) || has_var(body, i + 1), 570 | Pi(_, dom, body) => 
has_var(&dom.ty, i) || has_var(body, i + 1), 571 | Let(_, dom, val, body) => has_var(&dom.ty, i) || has_var(val, i) || has_var(body, i + 1), 572 | _ => unreachable!() 573 | } 574 | } 575 | 576 | pub fn take_while_slice(s : &[T], f : impl Fn(&T) -> bool) -> (&[T], &[T]) { 577 | let mut idx = 0usize; 578 | while idx < s.len() && f(&s[idx]) { 579 | idx += 1 580 | } 581 | let lhs = &s[0..idx]; 582 | let rhs = &s[idx..]; 583 | (lhs, rhs) 584 | } 585 | 586 | pub fn render_expr(e : &Expr, env : &Arc>) -> String { 587 | let pp = PrettyPrinter::new(None, env); 588 | pp.pp_expr(e) 589 | .doc 590 | .group() 591 | .render(pp.pp_options.width) 592 | } 593 | 594 | 595 | #[derive(Clone)] 596 | pub struct PPOptions { 597 | pub all : bool, 598 | pub implicit : bool, 599 | pub notation : bool, 600 | pub proofs : bool, 601 | pub locals_full_names : bool, 602 | pub indent : usize, 603 | pub width : usize 604 | } 605 | 606 | impl PPOptions { 607 | pub fn new_all_false() -> Self { 608 | PPOptions { 609 | all : false, 610 | implicit : false, 611 | notation : false, 612 | proofs : false, 613 | locals_full_names : false, 614 | indent : 0usize, 615 | width : 0usize 616 | } 617 | } 618 | 619 | pub fn new_default() -> Self { 620 | PPOptions { 621 | all : false, 622 | implicit : false, 623 | notation : true, 624 | proofs : true, 625 | locals_full_names : false, 626 | indent : 2usize, 627 | width : 80usize 628 | } 629 | } 630 | } -------------------------------------------------------------------------------- /src/quot.rs: -------------------------------------------------------------------------------- 1 | 2 | use std::sync::Arc; 3 | 4 | use crate::chain; 5 | use crate::name::Name; 6 | use crate::level::{ mk_param, }; 7 | use crate::reduction::ReductionRule; 8 | use crate::env::{ Declaration, Modification, CompiledModification }; 9 | use crate::expr::{ BinderStyle::*, 10 | mk_prop, 11 | mk_local, 12 | mk_const, 13 | mk_app, 14 | mk_sort }; 15 | 16 | 17 | /// Quot ends up being four introduction 
rules and one reduction rule 18 | /// which are declared once, very early on in the export file 19 | /// (right after the inductive definition of equality). 20 | /// This module is pretty much just "by hand" assembly of Quot. I'm not sure 21 | /// why the export file doesn't lend more help in putting this together. 22 | #[derive(Clone)] 23 | pub struct Quot { 24 | pub declarations: Vec, 25 | pub reduction_rule: ReductionRule, 26 | } 27 | 28 | pub fn new_quot() -> Modification { 29 | // There are a bunch of expressions that get used ad nauseum here, 30 | // so we define some of them as reusable omponents to make later definitions 31 | // (a little bit) more compact. The key definitions are annotated with their 32 | // lean equivalent 33 | let prop = mk_prop(); 34 | let param_u = || mk_param("u"); 35 | let param_v = || mk_param("v"); 36 | let params_u = || Arc::new(vec![param_u()]); 37 | let params_uv = || Arc::new(vec![param_u(), param_v()]); 38 | let sort_u = mk_sort(mk_param("u")); 39 | let _A = mk_local("A", mk_sort(param_u()), Implicit); 40 | let _B = mk_local("B", mk_sort(mk_param("v")), Implicit); 41 | let _R = mk_local("R", _A.mk_arrow(&_A.mk_arrow(&prop)), Default); 42 | let _f = mk_local("f", _A.mk_arrow(&_B), Default); 43 | let _a = mk_local("a", _A.clone(), Default); 44 | let _b = mk_local("b", _A.clone(), Default); 45 | 46 | 47 | let quot_const_univ_u = || mk_const("quot", vec![param_u()]); 48 | let quot_mk_const_univ_u = || mk_const(Name::from("quot").extend_str("mk"), vec![param_u()]); 49 | let quot_pi_app = sort_u.fold_pis(chain![&_A, &_R]); 50 | 51 | // First introduction rule. in Lean : 52 | // quot : Π {α : Sort u}, (α → α → Prop) → Sort u 53 | let quot = Declaration::mk(Name::from("quot"), 54 | params_u(), 55 | quot_pi_app, 56 | None, 57 | Some(true)); 58 | 59 | let quot_mk_f_a = mk_const("quot", params_u()).fold_apps(vec![&_A, &_R]); 60 | 61 | let quot_mk_f = _A.mk_arrow("_mk_f_a); 62 | 63 | // Second introduction rule. 
In lean : 64 | // quot.mk : Π {α : Sort u} (r : α → α → Prop), α → @quot α r 65 | let quot_mk = Declaration::mk( 66 | Name::from("quot").extend_str("mk"), 67 | params_u(), 68 | quot_mk_f.fold_pis(chain![&_A, &_R]), 69 | None, 70 | Some(true) 71 | ); 72 | 73 | let eq_const = mk_const("eq", vec![param_v()]); 74 | let app1 = mk_app(_f.clone(), _a.clone()); 75 | let app2 = mk_app(_f.clone(), _b.clone()); 76 | let eq_app = eq_const.fold_apps(vec![&_B, &app1, &app2]); 77 | let eq_lhs = _R.fold_apps(vec![&_a, &_b]); 78 | 79 | let inner_app = eq_lhs.mk_arrow(&eq_app); 80 | let lower_const = quot_const_univ_u(); 81 | let lower_apps = lower_const.fold_apps(vec![&_A, &_R]); 82 | let lhs_pis = inner_app.fold_pis(chain![&_a, &_b]); 83 | let triple_arrow = lhs_pis.mk_arrow(&(lower_apps.mk_arrow(&_B))); 84 | let pis_together = triple_arrow.fold_pis(chain![&_A, &_R, &_B, &_f]); 85 | 86 | 87 | // Third introduction rule. In lean : 88 | // quot.lift : Π {α : Sort u} {r : α → α → Prop} {β : Sort v} (f : α → β), 89 | // (∀ (a b : α), r a b → f a = f b) → quot r → β 90 | let quot_lift = Declaration::mk( 91 | quot.name.extend_str("lift"), 92 | params_uv(), 93 | pis_together, 94 | None, 95 | Some(true) 96 | ); 97 | 98 | let B2_arrows_lhs = quot_const_univ_u().fold_apps(vec![&_A, &_R]); 99 | let _B2 = mk_local("B", 100 | B2_arrows_lhs.mk_arrow(&prop), 101 | Implicit); 102 | let _q = mk_local("q", 103 | quot_const_univ_u().fold_apps(vec![&_A, &_R]), 104 | Default); 105 | 106 | let ind_pi_1_inner = quot_mk_const_univ_u().fold_apps(vec![&_A, &_R, &_a]); 107 | let ind_pi_1_mid = _B2.fold_apps(Some(&ind_pi_1_inner)); 108 | let ind_pi_1 = ind_pi_1_mid.fold_pis(chain![&_a]); 109 | let B2_q = _B2.fold_apps(vec![&_q]); 110 | let ind_pi_2 = B2_q.fold_pis(chain![&_q]); 111 | let ind_arrows = ind_pi_1.mk_arrow(&ind_pi_2); 112 | 113 | // Last introduction rule. 
In Lean : 114 | // quot.ind : ∀ {α : Sort u} {r : α → α → Prop} {β : @quot α r → Prop}, 115 | // (∀ (a : α), β (@quot.mk α r a)) → ∀ (q : @quot α r), β q 116 | let quot_ind = Declaration::mk( 117 | quot.name.extend_str("ind"), 118 | params_u(), 119 | ind_arrows.fold_pis(chain![&_A, &_R, &_B2]), 120 | None, 121 | Some(true) 122 | ); 123 | 124 | let const_eq_v = mk_const("eq", vec![param_v()]); 125 | 126 | 127 | let _h = mk_local("h", 128 | const_eq_v.fold_apps(vec![&_b, &app1, &app2]), 129 | Default); 130 | 131 | let quot_red_arg_const = mk_const(quot_lift.name.clone(), vec![param_u()]); 132 | let arg2_rhs_const = mk_const(quot_mk.name.clone(), vec![param_u()]); 133 | let arg2_rhs_apps = arg2_rhs_const.fold_apps(vec![&_A, &_R, &_a]); 134 | let quot_red_arg2 = quot_red_arg_const.fold_apps(vec![&_A, 135 | &_R, 136 | &_B, 137 | &_f, 138 | &_h, 139 | &arg2_rhs_apps]); 140 | 141 | // Sole reduction rule. 142 | let quot_red = ReductionRule::new_nondef_rr( 143 | &[_A.clone(), _R.clone(), _B.clone(), _f.clone(), _a.clone(), _h.clone()], 144 | quot_red_arg2, 145 | _f.fold_apps(vec![&_a]), 146 | None.into_iter() 147 | ); 148 | 149 | let q = Quot { 150 | declarations : vec![quot, quot_mk, quot_ind, quot_lift], 151 | reduction_rule : quot_red 152 | }; 153 | 154 | Modification::QuotMod(q) 155 | } 156 | 157 | impl Quot { 158 | pub fn compile_self(self) -> CompiledModification { 159 | CompiledModification::CompiledQuotMod(self.declarations, self.reduction_rule) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /src/reduction.rs: -------------------------------------------------------------------------------- 1 | use std::hash::{ Hash, Hasher }; 2 | use std::sync::Arc; 3 | 4 | use fxhash::hash64; 5 | use hashbrown::HashMap; 6 | 7 | use crate::name::Name; 8 | use crate::level::Level; 9 | use crate::expr::{ Expr, InnerExpr::* }; 10 | use crate::errors; 11 | 12 | 13 | /// Maps (ReductionRule, [(Level, Level)]) to an Exprssion; 14 | 
/// used to say "given this reduction rule and these universe 15 | /// substitutions, have I computed the resulting expression before?" 16 | /// If so, just return the cached expression calculated earlier. 17 | #[derive(Clone)] 18 | pub struct ReductionCache { 19 | pub inner : HashMap<(ReductionRule, Vec<(Level, Level)>), Expr> 20 | } 21 | 22 | impl ReductionCache { 23 | pub fn with_capacity(n : usize) -> Self { 24 | ReductionCache { 25 | inner : HashMap::with_capacity(n) 26 | } 27 | } 28 | } 29 | 30 | #[derive(Debug, Clone, PartialEq, Eq)] 31 | pub struct ReductionRule { 32 | pub lhs_const_name: Name, 33 | pub lhs: Expr, 34 | pub rhs: Expr, 35 | pub def_eq_constraints: Arc>, 36 | pub lhs_var_bound: u16, 37 | pub lhs_args_size: usize, 38 | pub majors : Vec, 39 | pub digest : u64, 40 | } 41 | 42 | impl Hash for ReductionRule { 43 | fn hash(&self, state : &mut H) { 44 | self.digest.hash(state); 45 | } 46 | } 47 | 48 | /// Reduction rule has two constructors; `new_nonef_rr` is used by 49 | /// inductive types and quotient and has to do a little bit of 50 | /// up-front work before using `new_rr` to finish creating 51 | /// the reduction rule. `Definition` items can just call `new_rr` 52 | /// directly. 53 | impl ReductionRule { 54 | pub fn new_rr(lhs : Expr, rhs : Expr, def_eq_constraints : Vec<(Expr, Expr)>) -> Self { 55 | let lhs_var_bound = lhs.var_bound(); 56 | assert!(!lhs.has_locals()); 57 | assert!(!rhs.has_locals()); 58 | assert!(rhs.var_bound() <= lhs_var_bound); 59 | 60 | let (app_fn, lhs_args) = lhs.unfold_apps_refs(); 61 | let lhs_args_size = lhs_args.len(); 62 | let lhs_const_name = match app_fn.as_ref() { 63 | Const(_, name, _) => name.clone(), 64 | owise => errors::err_rr_const(line!(), owise), 65 | }; 66 | 67 | // IE : Vec[Var(9), Const(..), Var(19), Var(11), Sort(..)] 68 | // becomes Vec[0, 2, 3] 69 | let majors = lhs_args.iter().rev().enumerate().filter_map(|(idx, arg)| { 70 | match arg.as_ref() { 71 | Var(..) 
=> None, 72 | _ => Some(idx) 73 | } 74 | }).collect::>(); 75 | 76 | // Only need to hash these two items since the other fields are derived 77 | // from them. 78 | let digest = hash64(&(&lhs.get_digest(), &rhs.get_digest())); 79 | 80 | ReductionRule { 81 | lhs_const_name, 82 | lhs, 83 | rhs, 84 | def_eq_constraints : Arc::new(def_eq_constraints), 85 | lhs_var_bound, 86 | lhs_args_size : lhs_args_size, 87 | majors : majors, 88 | digest 89 | } 90 | } 91 | 92 | pub fn new_nondef_rr<'r, R>(locals : &[Expr], 93 | lhs : Expr, 94 | rhs : Expr, 95 | def_eq_constraints : R) -> Self 96 | where R : Iterator { 97 | let lhs_abstd = lhs.abstract_(locals.into_iter()); 98 | let rhs_abstd = rhs.abstract_(locals.into_iter()); 99 | 100 | let def_eq_constraints_abstd = def_eq_constraints.map(|(a, b)| { 101 | let c1_a = a.abstract_(locals.into_iter()); 102 | let c2_a = b.abstract_(locals.into_iter()); 103 | (c1_a, c2_a) 104 | }).collect::>(); 105 | 106 | ReductionRule::new_rr(lhs_abstd, rhs_abstd, def_eq_constraints_abstd) 107 | } 108 | 109 | pub fn collect_substs<'l, 's>(&self, 110 | e1 : &'l Expr, 111 | e2 : &'l Expr, 112 | var_subs : &'s mut Vec<&'l Expr>, 113 | univ_subs : &mut Vec<(Level, Level)>) -> bool { 114 | match (e1.as_ref(), e2.as_ref()) { 115 | (App(_, lhs1, rhs1), App(_, lhs2, rhs2)) => { 116 | self.collect_substs(lhs1, lhs2, var_subs, univ_subs) 117 | && self.collect_substs(rhs1, rhs2, var_subs, univ_subs) 118 | }, 119 | (Const(.., n1, lvls1), Const(.., n2, lvls2)) if n1 == n2 => { 120 | for (lhs, rhs) in lvls1.as_ref().clone().into_iter() 121 | .zip(lvls2.as_ref().clone()) { 122 | univ_subs.push((lhs, rhs)); 123 | } 124 | true 125 | }, 126 | (Var(_, idx), _) => { 127 | match var_subs.get_mut(*idx as usize) { 128 | Some(already) => { std::mem::replace(already, e2); }, 129 | None => { 130 | // FIXME find a better way to pad placeholder values. 
131 | // var_subs will eventually (on correct execution) 132 | // have every position filled, but AFAIK there aren't any 133 | // guarantees about receiving them in order, so you have to pad. 134 | while var_subs.len() < *idx as usize { 135 | var_subs.push(e2); 136 | } 137 | var_subs.push(e2); 138 | assert!(var_subs.len() == (*idx as usize) + 1); 139 | } 140 | } 141 | true 142 | }, 143 | _ => false 144 | } 145 | } 146 | 147 | pub fn apply_reduction<'l>(&self, 148 | e : Expr, 149 | cache : &mut ReductionCache) 150 | -> Option<(Expr, Vec<(Expr, Expr)>)> { 151 | let mut var_subs = Vec::<&'l Expr>::with_capacity(100); 152 | let mut univ_subs = Vec::with_capacity(100); 153 | 154 | if !self.collect_substs(&self.lhs, &e, &mut var_subs, &mut univ_subs) { 155 | return None 156 | } 157 | 158 | let cached_or_new = match cache.inner.get(&(self.clone(), univ_subs.clone())) { 159 | Some(cached) => cached.clone(), 160 | None => { 161 | let new_cache_val = self.rhs.instantiate_ps(&univ_subs); 162 | cache.inner.insert((self.clone(), univ_subs.clone()), new_cache_val.clone()); 163 | new_cache_val 164 | } 165 | }; 166 | 167 | if self.lhs_var_bound == 0 { 168 | Some((cached_or_new, self.def_eq_constraints.as_ref().clone())) 169 | } else { 170 | let instd_base = cached_or_new.instantiate(var_subs.iter().cloned()); 171 | let instd_constraints = 172 | self.def_eq_constraints.iter() 173 | .map(|(i, j)| { 174 | let i_ = i.instantiate(var_subs.iter().cloned()); 175 | let j_ = j.instantiate(var_subs.iter().cloned()); 176 | (i_, j_) 177 | }).collect::>(); 178 | 179 | Some((instd_base, instd_constraints)) 180 | } 181 | } 182 | 183 | pub fn apply_hd_tl(&self, hd : &Expr, apps : &[Expr], cache : &mut ReductionCache) -> Option<(Expr, Vec<(Expr, Expr)>)> { 184 | if apps.len() < self.lhs_args_size { 185 | return None 186 | } 187 | 188 | let (apps_l, apps_r) = apps.split_at(self.lhs_args_size); 189 | let applied = hd.fold_apps(apps_l); 190 | self.apply_reduction(applied, cache) 191 | .map(|(reduc, 
cs)| { 192 | let applied = reduc.fold_apps(apps_r); 193 | (applied, cs) 194 | }) 195 | } 196 | 197 | } 198 | 199 | #[derive(Clone)] 200 | pub struct ReductionMap { 201 | pub reduction_rules : HashMap>, 202 | major_premises : HashMap> 203 | } 204 | 205 | 206 | impl ReductionMap { 207 | 208 | pub fn new(num_mods : usize) -> Self { 209 | ReductionMap { 210 | reduction_rules : HashMap::with_capacity(num_mods), 211 | major_premises : HashMap::with_capacity(num_mods), 212 | } 213 | } 214 | 215 | pub fn get_value(&self, n : &Name) -> Option<&Expr> { 216 | for elem in self.reduction_rules.get(n)? { 217 | match elem.lhs.as_ref() { 218 | Const(..) => return Some(&elem.rhs), 219 | _ => continue 220 | } 221 | }; 222 | // if loop fails to find something to return early with... 223 | return None 224 | } 225 | 226 | pub fn apply_to_map(&self, 227 | e : Expr, 228 | cache : &mut ReductionCache) -> Option<(Expr, Vec<(Expr, Expr)>)> { 229 | let (hd, apps) = e.unfold_apps_special(); 230 | 231 | if let Const(_, name, _) = hd.as_ref() { 232 | let source = self.reduction_rules.get(&name).cloned()?; 233 | for elem in source { 234 | match elem.apply_hd_tl(&hd, apps.as_slice(), cache) { 235 | found @ Some(_) => return found, 236 | None => continue 237 | } 238 | } 239 | return None 240 | } else { 241 | return None 242 | } 243 | } 244 | 245 | /// Inserting a new ReductionRule into rules, and ADDING its majors to the cumulative 246 | /// set of the other Name |-> Vec mapping's majors. Also asserting some 247 | /// invariants; if the original lookup of Name |-> Rule was Some, then Majors should 248 | /// exist, and if it was None, then majors should not exist. 
249 | pub fn add_rule(&mut self, new_rule : ReductionRule) { 250 | let name_key = new_rule.lhs_const_name.clone(); 251 | let major_prem_vec = new_rule.majors.clone(); 252 | match self.reduction_rules.get_mut(&name_key) { 253 | Some(already_rules) => { 254 | already_rules.push(new_rule); 255 | self.major_premises.get_mut(&name_key) 256 | .unwrap_or_else(|| errors::err_add_rule(line!(), &name_key)) 257 | .extend(major_prem_vec.into_iter()); 258 | }, 259 | None => { 260 | let res1 = self.reduction_rules.insert(name_key.clone(), vec![new_rule]); 261 | let res2 = self.major_premises.insert(name_key.clone(), major_prem_vec); 262 | // assert they weren't already in the map 263 | assert!(res1.is_none() && res2.is_none()); 264 | } 265 | } 266 | } 267 | 268 | pub fn get_major_premises(&self, key : &Name) -> Option<&Vec> { 269 | self.major_premises.get(key) 270 | } 271 | 272 | 273 | } 274 | -------------------------------------------------------------------------------- /src/tc.rs: -------------------------------------------------------------------------------- 1 | 2 | use std::sync::Arc; 3 | use hashbrown::HashMap; 4 | use parking_lot::RwLock; 5 | use stacker::maybe_grow; 6 | 7 | use crate::utils::{ ShortCircuit, ShortCircuit::*, EqCache }; 8 | use crate::name::Name; 9 | use crate::level::{ Level, mk_imax, mk_succ }; 10 | use crate::expr::{ Expr, Binding, InnerExpr::*, mk_app, mk_lambda, mk_var, mk_sort, mk_prop, mk_pi }; 11 | use crate::reduction::ReductionCache; 12 | use crate::env::Env; 13 | use crate::errors::*; 14 | use Flag::*; 15 | 16 | 17 | /// "A Typechecker" is just a collection of caches and a handle to the current 18 | /// environment (we only ever need to read from it in this case). 19 | /// unsafe_unchecked should be true iff the TypeChecker will only ever 20 | /// be used by the pretty printer. 
#[derive(Clone)]
pub struct TypeChecker {
    // True iff this checker belongs to the pretty printer, in which case the
    // more expensive sanity checks are skipped (see `should_check`).
    unsafe_unchecked: bool,
    pub infer_cache : HashMap<Expr, Expr>,
    pub eq_cache : EqCache,
    pub whnf_cache : HashMap<Expr, Expr>,
    pub reduction_cache : ReductionCache,
    pub env : Arc<RwLock<Env>>,
}

impl std::fmt::Debug for TypeChecker {
    fn fmt(&self, f : &mut std::fmt::Formatter) -> std::fmt::Result {
        // The caches are huge and the env is shared; a placeholder is all
        // that is useful here.
        write!(f, "<typechecker>")
    }
}

impl TypeChecker {
    /// `unsafe_unchecked` defaults to `false` (ie "do check") when `None`.
    pub fn new(unsafe_unchecked : Option<bool>, env : Arc<RwLock<Env>>) -> Self {
        TypeChecker {
            unsafe_unchecked : unsafe_unchecked.unwrap_or(false),
            infer_cache : HashMap::with_capacity(1000),
            eq_cache : EqCache::with_capacity(500),
            whnf_cache : HashMap::with_capacity(100),
            reduction_cache : ReductionCache::with_capacity(100),
            env
        }
    }

    /// Hands out another shared handle to the environment.
    pub fn fork_env(&self) -> Arc<RwLock<Env>> {
        self.env.clone()
    }

    pub fn should_check(&self) -> bool {
        !self.unsafe_unchecked
    }

    /// The "heights" of two terms `E1` and `E2` are used to determine whether one
    /// is defined in terms of or uses terms derived from the other. If at some point
    /// we need to unify `E1 == E2`, we want to unfold the HIGHER one FIRST, since
    /// it will eventually unfold into (something resembling) the lower term,
    /// whereas continued unfolding of the lower term will just get us more and
    /// more primitive terms that get further away from the goal.
    /// Thanks to @Gebner for explaining this to me.
    fn def_height(&self, _fn : &Expr) -> u16 {
        if let Const(_, name, _) = _fn.as_ref() {
            self.env.read()
                .declarations
                .get(name)
                .map(|h| h.height + 1)
                .unwrap_or(0u16)
        } else {
            0u16
        }
    }

    /// e is a prop iff it destructures as Sort(Level(Zero))
    pub fn is_prop(&mut self, e : &Expr) -> bool {
        match self.whnf(e).as_ref() {
            Sort(_, lvl) => lvl.is_zero(),
            _ => false
        }
    }

    /// tries is_prop after inferring e
    pub fn is_proposition(&mut self, e : &Expr) -> bool {
        let inferred = self.infer(e);
        self.is_prop(&inferred)
    }

    /// `p` is a proof iff its type is a proposition; the inferred type is
    /// returned as well so callers don't have to re-infer it.
    pub fn is_proof(&mut self, p: &Expr) -> (bool, Expr) {
        let inferred = self.infer(p);
        (self.is_proposition(&inferred), inferred)
    }

    /// Proof irrelevance: two proofs are definitionally equal whenever the
    /// propositions they prove are.
    fn is_proof_irrel_eq(&mut self, e1: &Expr, e2: &Expr) -> bool {
        match self.is_proof(e1) {
            (true, t1) => match self.is_proof(e2) {
                (true, t2) => self.check_def_eq(&t1, &t2) == EqShort,
                _ => false
            },
            _ => false
        }
    }


    /// More aggressive version of `unfold_pis`. Given some term `E`, repeats
    /// `{ apply whnf(e), then unfold_pis(e) }` until that combination
    /// fails to strip any more binders out.
    pub fn normalize_pis(&mut self, e : &Expr) -> (Expr, Vec<Expr>) {
        let mut collected_binders = Vec::new();
        let mut acc = e.clone();

        loop {
            let len_before = collected_binders.len();
            acc = self.whnf(&acc);
            acc.unfold_pis(&mut collected_binders);
            // No new binders surfaced: a fixed point has been reached.
            if len_before == collected_binders.len() {
                break
            }
        }

        (acc, collected_binders)
    }

    // This only gets used once in inductive. Will use &[Expr]
    // that comes as `toplevel_params` used during formation of intro rules.
    // I'm not really sure how the length of the subst sequence corresponds
    // to the number of times whnf is supposed to be executed to be honest.
    pub fn instantiate_pis(&mut self, intro_type : &Expr, toplevel_intro_params : &[Expr]) -> Expr {
        let mut iterations_left = toplevel_intro_params.len();
        let mut acc = intro_type.clone();

        while iterations_left > 0 {
            match acc.as_ref() {
                Pi(.., body) => {
                    iterations_left -= 1;
                    acc = body.clone();
                },
                _ => {
                    // Not syntactically a Pi yet; whnf must expose one, or the
                    // parameters do not fit the type.
                    acc = self.whnf(&acc);
                    // assert that the result is a Pi
                    assert!(match acc.as_ref() {
                        Pi(..) => true,
                        _ => false
                    });
                }
            }
        }

        acc.instantiate(toplevel_intro_params.iter().rev())
    }

    /// Outward facing function/entry point for reduction to weak head normal form.
    /// Checks cache for a previous result, calling whnf_core on a cache miss.
    pub fn whnf(&mut self, e : &Expr) -> Expr {
        if let Some(cached) = self.whnf_cache.get(e) {
            cached.clone()
        } else {
            let result = self.whnf_core(e.clone(), Some(FlagT));
            self.whnf_cache.insert(e.clone(), result.clone());
            result
        }
    }

    /// Iterative whnf driver: beta-reduces applied lambdas, zeta-reduces
    /// lets, and otherwise tries the environment's reduction rules until no
    /// step applies.
    pub fn whnf_core(&mut self, mut e : Expr, mut _flag : Option<Flag>) -> Expr {
        loop {
            let flag = _flag.unwrap_or(FlagT);
            let (_fn, apps) = e.unfold_apps_refs();

            match _fn.as_ref() {
                Sort(_, lvl) => return mk_sort(lvl.simplify()),
                Lambda(..) if !apps.is_empty() => {
                    e = self.whnf_lambda(_fn, apps);
                    _flag = Some(flag);
                },
                Let(.., val, body) => {
                    let instd = body.instantiate(Some(val).into_iter());
                    e = instd.fold_apps(apps.into_iter().rev());
                    _flag = Some(flag);
                },
                _ => {
                    match self.reduce_hdtl(_fn, apps.as_slice(), Some(flag)) {
                        Some(eprime) => {
                            _flag = Some(flag);
                            e = eprime;
                        },
                        None => return e
                    }
                }
            }
        }
    }

    /// Beta-reduction helper: pairs off a lambda telescope's binders with the
    /// available arguments, instantiates the body, and re-applies whatever
    /// arguments are left over.
    pub fn whnf_lambda(&mut self,
                       mut f : &Expr,
                       mut apps : Vec<&Expr>) -> Expr {
        let mut ctx = Vec::with_capacity(apps.len());

        while let Lambda(_, _, fn_) = f.as_ref() {
            if let Some(hd) = apps.pop() {
                ctx.push(hd);
                f = fn_;
            } else {
                break
            }
        }

        f.instantiate(ctx.into_iter().rev())
            .fold_apps(apps.into_iter().rev())
    }

    /// The entry point for executing a single reduction step on two
    /// expressions. Unfolds the higher-height term first (see `def_height`).
    pub fn reduce_exps(&mut self, e1 : Expr, e2 : Expr, flag : Option<Flag>) -> Option<(Expr, Expr)> {
        assert!(flag == Some(FlagT));
        let (fn1, apps1) = e1.unfold_apps_refs();
        let (fn2, apps2) = e2.unfold_apps_refs();

        // we want to evaluate these lazily.
        let red1 = |tc : &mut TypeChecker| tc.reduce_hdtl(fn1, apps1.as_slice(), flag).map(|r| (r, e2.clone()));
        let red2 = |tc : &mut TypeChecker| tc.reduce_hdtl(fn2, apps2.as_slice(), flag).map(|r| (e1.clone(), r));

        // `or_else` (not `or`) so the fallback reduction only runs when the
        // preferred one fails; `or` would eagerly execute both.
        if self.def_height(fn1) > self.def_height(fn2) {
            red1(self).or_else(|| red2(self))
        } else {
            red2(self).or_else(|| red1(self))
        }
    }


    /// Tries to rewrite `_fn apps…` via the environment's reduction map,
    /// whnf-ing only the "major" argument positions first. Returns `None`
    /// when `flag` is `FlagF`, when the head is not a constant, when no rule
    /// matches, or when a rule's def-eq side conditions fail.
    pub fn reduce_hdtl(&mut self, _fn : &Expr, apps : &[&Expr], flag : Option<Flag>) -> Option<Expr> {

        if let Some(FlagF) = flag {
            return None
        }

        let name : &Name = match _fn.as_ref() {
            Const(_, name, _) => name,
            _ => return None
        };

        let major_prems = self.env
            .read()
            .reduction_map
            .get_major_premises(name)
            .cloned();

        let mut collected = Vec::with_capacity(apps.len());

        for (idx, elem) in apps.iter().rev().enumerate() {
            if major_prems
                .as_ref()
                .map(|set| set.contains(&idx))
                .unwrap_or(false) {
                collected.push(self.whnf(elem));
            } else {
                collected.push((*elem).clone());
            }
        }

        let applied = _fn.fold_apps(collected.iter());
        let (result, constraints) = self.env
            .read()
            .reduction_map
            .apply_to_map(applied, &mut self.reduction_cache)?;

        if constraints.iter().all(|(a, b)| self.def_eq(a, b)) {
            Some(result)
        } else {
            None
        }
    }


    fn def_eq(&mut self, a : &Expr, b : &Expr) -> bool {
        self.check_def_eq(a, b) == EqShort
    }


    /// only used in `check_def_eq_patterns`. Broken out
    /// to prevent `patterns` from getting too big/hard to read.
289 | pub fn apps_eq(&mut self, 290 | apps1 : Vec<&Expr>, 291 | apps2 : Vec<&Expr>) -> ShortCircuit { 292 | if apps1.len() != apps2.len() { 293 | return NeqShort 294 | } else { 295 | for (a, b) in apps1.iter().zip(apps2).rev() { 296 | let closure = maybe_grow(64 * 1024, 1024 * 1024, || self.check_def_eq(a, b)); 297 | if closure == EqShort { 298 | continue 299 | } else { 300 | return NeqShort 301 | } 302 | } 303 | EqShort 304 | } 305 | } 306 | 307 | /// Main entry point for checking definitional equality of two terms, which 308 | /// dispatches out into a number of different functions. 309 | /// 1. `check_def_eq_core` does some destructuring and reduction to weak head 310 | /// normal form. 311 | /// 2. `check_def_eq_patterns` just consults a big list of cases/patterns 312 | /// to determine which decision procedure it needs to use move forward. 313 | /// 3. `patterns` may call `check_def_eq_pi/lambda` to determine whether 314 | /// a pair of Pi or Lambda expressions are definitionally equal. 315 | pub fn check_def_eq(&mut self, e1 : &Expr, e2 : &Expr) -> ShortCircuit { 316 | // checks for both pointer and structural equality 317 | if e1 == e2 { 318 | return EqShort 319 | } 320 | 321 | // check whether this equality has been seen before. 322 | if let Some(cached) = self.eq_cache.get(&e1, &e2) { 323 | return cached 324 | } 325 | 326 | // otherwise, compute a result, then cache it in case we see these terms again. 327 | let result = if self.is_proof_irrel_eq(e1, e2) { 328 | EqShort 329 | } else { 330 | self.check_def_eq_core(e1, e2) 331 | }; 332 | 333 | self.eq_cache.insert(e1.clone(), e2.clone(), result); 334 | result 335 | } 336 | 337 | 338 | 339 | /// Dispatch point for different decision procedures used to determine 340 | /// whether two expressions are definitionally equal in a certain context. 
341 | pub fn check_def_eq_patterns(&mut self, whnfd_1 : &Expr, whnfd_2 : &Expr) -> ShortCircuit { 342 | let (fn1, apps1) = whnfd_1.unfold_apps_refs(); 343 | let (fn2, apps2) = whnfd_2.unfold_apps_refs(); 344 | 345 | match (fn1.as_ref(), fn2.as_ref()) { 346 | (Sort(_, l1), Sort(_, l2)) => 347 | match apps1.is_empty() && apps2.is_empty() { 348 | true => match Level::eq_by_antisymm(l1, l2) { 349 | true => EqShort, 350 | false => NeqShort, 351 | } 352 | _ => NeqShort 353 | }, 354 | (Const(_, n1, lvls1), Const(_, n2, lvls2)) => { 355 | if n1 == n2 && lvls1.iter().zip(lvls2.as_ref()).all(|(a, b)| Level::eq_by_antisymm(a, b)) { 356 | self.apps_eq(apps1, apps2) 357 | } else { 358 | NeqShort 359 | } 360 | }, 361 | (Local(.., of1), Local(.., of2)) => { 362 | if of1 == of2 { 363 | self.apps_eq(apps1, apps2) 364 | } else { 365 | NeqShort 366 | } 367 | }, 368 | (Lambda(..), Lambda(..)) => self.check_def_eq_lambdas(fn1, fn2), 369 | (Lambda(_, dom, _), _) => { 370 | assert!(apps1.is_empty()); 371 | let app = mk_app(whnfd_2.clone(), mk_var(0)); 372 | let new_lam = mk_lambda(dom.clone(), app); 373 | self.check_def_eq_core(fn1, &new_lam) 374 | }, 375 | (_, Lambda(_, dom, _)) => { 376 | let app = mk_app(whnfd_1.clone(), mk_var(0)); 377 | let new_lam = mk_lambda(dom.clone(), app); 378 | self.check_def_eq_core(&new_lam, fn2) 379 | }, 380 | (Pi(..), Pi(..)) => self.check_def_eq_pis(fn1, fn2), 381 | _ => NeqShort 382 | } 383 | } 384 | 385 | 386 | 387 | pub fn check_def_eq_core(&mut self, e1_0 : &Expr, e2_0 : &Expr) -> ShortCircuit { 388 | 389 | let whnfd_1 = self.whnf_core(e1_0.clone(), Some(FlagF)); 390 | let whnfd_2 = self.whnf_core(e2_0.clone(), Some(FlagF)); 391 | 392 | // consult different patterns laid out in 393 | // check_def_eq_patterns to see how to proceed 394 | match self.check_def_eq_patterns(&whnfd_1, &whnfd_2) { 395 | EqShort => return EqShort, 396 | NeqShort => { 397 | match self.reduce_exps(whnfd_1, whnfd_2, Some(FlagT)) { 398 | Some((red1, red2)) => 
self.check_def_eq_core(&red1, &red2), 399 | _ => return NeqShort 400 | } 401 | }, 402 | _ => unreachable!() 403 | } 404 | } 405 | 406 | 407 | // Literally the same function as its Lambda counterpart, but checks for a different 408 | // enum discriminant (Pis instead of Lambdas). 409 | pub fn check_def_eq_pis(&mut self, mut e1 : &Expr, mut e2 : &Expr) -> ShortCircuit { 410 | 411 | let mut substs = Vec::new(); 412 | 413 | // weird rust syntax; just means 'for as long as e1 and e2 414 | // are both Pi terms, keep executing the code in this block" 415 | while let (Pi(_, dom1, body1), Pi(_, dom2, body2)) = (e1.as_ref(), e2.as_ref()) { 416 | let mut lhs_type = None; 417 | 418 | if dom1 != dom2 { 419 | let instd_d2_ty = dom2.ty.instantiate(substs.iter().rev()); 420 | let instd_d1_ty = dom1.ty.instantiate(substs.iter().rev()); 421 | 422 | lhs_type = Some(dom2.clone().swap_ty(instd_d2_ty.clone())); 423 | // If the domains are found not to be equal, return early 424 | // with NeqShort since the whole thing is therefore not equal 425 | if !self.def_eq(&instd_d1_ty, &instd_d2_ty) { 426 | return NeqShort 427 | } 428 | } 429 | 430 | if (body1.has_vars() || body2.has_vars()) { 431 | let new_local = match lhs_type { 432 | Some(elem) => elem.as_local(), 433 | None => { 434 | let mut _x = dom2.clone(); 435 | let new_ty = _x.ty.instantiate(substs.iter().rev()); 436 | _x.swap_ty(new_ty).as_local() 437 | } 438 | }; 439 | substs.push(new_local); 440 | } else { 441 | substs.push(mk_prop()) 442 | } 443 | 444 | e1 = body1; 445 | e2 = body2; 446 | } 447 | 448 | match self.def_eq(&e1.instantiate(substs.iter().rev()), 449 | &e2.instantiate(substs.iter().rev())) { 450 | true => EqShort, 451 | false => NeqShort 452 | } 453 | } 454 | 455 | 456 | // Literally the same function as its Pi counterpart, but checks for a different 457 | // enum discriminant (Lambdas instead of Pis). 
458 | pub fn check_def_eq_lambdas(&mut self, mut e1 : &Expr, mut e2 : &Expr) -> ShortCircuit { 459 | let mut substs = Vec::new(); 460 | 461 | // weird rust syntax; just means "for as long as e1 and e2 462 | // are both Lambda terms, keep executing the code in this block" 463 | while let (Lambda(_, dom1, body1), Lambda(_, dom2, body2)) = (e1.as_ref(), e2.as_ref()) { 464 | let mut lhs_type = None; 465 | 466 | if dom1 != dom2 { 467 | let instd_d2_ty = dom2.ty.instantiate(substs.iter().rev()); 468 | let instd_d1_ty = dom1.ty.instantiate(substs.iter().rev()); 469 | 470 | lhs_type = Some(dom2.clone().swap_ty(instd_d2_ty.clone())); 471 | // If the lambda domains are found not to be equal, return early 472 | // with NeqShort since the whole thing is therefore not equal 473 | if !self.def_eq(&instd_d1_ty, &instd_d2_ty) { 474 | return NeqShort 475 | } 476 | } 477 | 478 | if (body1.has_vars() || body2.has_vars()) { 479 | let new_local = match lhs_type { 480 | Some(elem) => elem.as_local(), 481 | None => { 482 | let mut _x = dom2.clone(); 483 | let new_ty = _x.ty.instantiate(substs.iter().rev()); 484 | _x.swap_ty(new_ty).as_local() 485 | } 486 | }; 487 | substs.push(new_local); 488 | } else { 489 | substs.push(mk_prop()) 490 | } 491 | 492 | e1 = body1; 493 | e2 = body2; 494 | 495 | } 496 | 497 | match self.def_eq(&e1.instantiate(substs.iter().rev()), 498 | &e2.instantiate(substs.iter().rev())) { 499 | true => EqShort, 500 | false => NeqShort 501 | } 502 | } 503 | 504 | 505 | 506 | /// Main dispatch point for type inference. Attempts to return early 507 | /// by checking a cache of previously inferred terms. 508 | /// Some of the methods are fairly long so they've been broken out 509 | /// into separate functions, trusting in the compiler to inline 510 | /// where appropriate. 
511 | pub fn infer(&mut self, term : &Expr) -> Expr { 512 | if let Some(cached) = self.infer_cache.get(&term) { 513 | return cached.clone() 514 | } 515 | 516 | let cache_key = term.clone(); 517 | 518 | let result = match term.as_ref() { 519 | Sort(_, lvl) => mk_sort(mk_succ(lvl.clone())), 520 | Const(_, name, lvls) => self.infer_const(name, lvls), 521 | Local(.., bind) => (bind.ty).clone(), 522 | App(..) => self.infer_apps(term), 523 | Lambda(..) => self.infer_lambda(term), 524 | Pi(..) => mk_sort(self.infer_pi(term)), 525 | Let(_, dom, val, body) => self.infer_let(dom, val, body), 526 | owise => err_infer_var(line!(), owise), 527 | }; 528 | 529 | self.infer_cache.insert(cache_key, result.clone()); 530 | 531 | result 532 | } 533 | 534 | 535 | 536 | pub fn infer_const(&mut self, name : &Name, levels : &Arc>) -> Expr { 537 | match self.env.read().declarations.get(name) { 538 | Some(dec) => { 539 | let univ_params = dec.univ_params.as_ref(); 540 | assert!(univ_params.len() == levels.len()); 541 | let subst_map = univ_params.clone().into_iter().zip(levels.as_ref().clone()).collect::>(); 542 | dec.ty.instantiate_ps(&subst_map) 543 | }, 544 | None => err_infer_const(line!(), name) 545 | } 546 | } 547 | 548 | pub fn infer_lambda(&mut self, mut term : &Expr) -> Expr { 549 | let mut domains = Vec::with_capacity(50); 550 | let mut locals = Vec::with_capacity(50); 551 | 552 | while let Lambda(_, ref old_dom, ref old_body) = term.as_ref() { 553 | domains.push(old_dom.clone()); 554 | let new_dom_ty = old_dom.ty.instantiate(locals.iter().rev()); 555 | let new_dom = old_dom.clone().swap_ty(new_dom_ty.clone()); 556 | 557 | if self.should_check() { 558 | self.infer_universe_of_type(&new_dom_ty); 559 | } 560 | 561 | let new_local = new_dom.as_local(); 562 | locals.push(new_local); 563 | term = old_body; 564 | } 565 | 566 | let instd = term.instantiate(locals.iter().rev()); 567 | let inferred = self.infer(&instd); 568 | let mut abstrd = inferred.abstract_(locals.iter().rev()); 569 | 
570 | while let Some(d) = domains.pop() { 571 | abstrd = mk_pi(d, abstrd); 572 | } 573 | 574 | abstrd 575 | } 576 | 577 | 578 | 579 | 580 | pub fn infer_universe_of_type(&mut self, term : &Expr) -> Level { 581 | let inferred = self.infer(term); 582 | match self.whnf(&inferred).as_ref() { 583 | Sort(_, lvl) => lvl.clone(), 584 | owise => err_infer_universe(line!(), owise), 585 | } 586 | } 587 | 588 | 589 | fn infer_apps(&mut self, term : &Expr) -> Expr { 590 | let (fn_, mut apps) = term.unfold_apps_refs(); 591 | 592 | let mut acc = self.infer(fn_); 593 | let mut context = Vec::<&Expr>::with_capacity(apps.len()); 594 | 595 | while let Some(elem) = apps.pop() { 596 | if let Pi(_, ref old_dom, ref old_body) = acc.as_ref() { 597 | if self.should_check() { 598 | let new_dom_ty = old_dom.ty 599 | .instantiate(context.iter().map(|x| *x).rev()); 600 | self.check_type(elem, &new_dom_ty); 601 | } 602 | context.push(elem); 603 | acc = (old_body).clone(); 604 | } else { 605 | let instd = acc.instantiate(context.iter().map(|x| *x).rev()); 606 | let whnfd = self.whnf(&instd); 607 | match whnfd.as_ref() { 608 | Pi(..) 
=> { 609 | apps.push(elem); 610 | context = Vec::new(); 611 | acc = whnfd; 612 | }, 613 | owise => err_infer_apps(line!(), owise), 614 | } 615 | } 616 | } 617 | 618 | acc.instantiate(context.iter().map(|x| *x).rev()) 619 | } 620 | 621 | 622 | pub fn infer_pi(&mut self, mut term : &Expr) -> Level { 623 | let mut locals = Vec::new(); 624 | let mut universes = Vec::new(); 625 | 626 | while let Pi(_, ref old_dom, ref old_body) = term.as_ref() { 627 | let new_dom_ty = old_dom.ty.instantiate(locals.iter().rev()); 628 | let new_dom = old_dom.clone().swap_ty(new_dom_ty.clone()); 629 | let dom_univ = self.infer_universe_of_type(&new_dom_ty); 630 | universes.push(dom_univ); 631 | let new_local = new_dom.as_local(); 632 | locals.push(new_local); 633 | term = old_body; 634 | } 635 | 636 | let instd = term.clone().instantiate(locals.iter().rev()); 637 | let mut inferred = self.infer_universe_of_type(&instd); 638 | //let inferred = self.infer_universe_of_type(&instd); 639 | //foldr(|acc, next| mk_imax(acc, next), universes, inferred) 640 | 641 | while let Some(u) = universes.pop() { 642 | inferred = mk_imax(u, inferred); 643 | }; 644 | 645 | inferred 646 | } 647 | 648 | pub fn infer_let(&mut self, dom : &Binding, val : &Expr, body : &Expr) -> Expr { 649 | if self.should_check() { 650 | self.infer_universe_of_type(&dom.ty); 651 | } 652 | if self.should_check() { 653 | self.check_type(val, &dom.ty); 654 | } 655 | 656 | let instd_body = body.instantiate(Some(val).into_iter()); 657 | self.infer(&instd_body) 658 | } 659 | 660 | 661 | pub fn check_type(&mut self, e : &Expr, ty : &Expr) { 662 | let inferred = self.infer(e); 663 | match self.check_def_eq(ty, &inferred) { 664 | EqShort => (), 665 | _ => err_check_type(line!(), e, ty), 666 | } 667 | } 668 | 669 | pub fn require_def_eq(&mut self, e1 : &Expr, e2 : &Expr) { 670 | match self.check_def_eq(e1, e2) { 671 | EqShort => (), 672 | _ => err_req_def_eq(line!(), e1, e2) 673 | } 674 | } 675 | 676 | } 677 | 678 | 679 | 680 | /// 
Exercises some control over the degree of reduction. In particular, 681 | /// affects whether `reduce_hdtl()` proceeds in attempting to reduce 682 | /// a constant term. 683 | #[derive(Debug, Clone, Copy, PartialEq)] 684 | pub enum Flag { 685 | FlagT, 686 | FlagF 687 | } 688 | 689 | 690 | -------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | use std::collections::VecDeque; 2 | use std::sync::Arc; 3 | 4 | use hashbrown::HashMap; 5 | use parking_lot::RwLock; 6 | 7 | use crate::expr::Expr; 8 | use crate::env::{ Modification, CompiledModification }; 9 | use crate::pretty::components::Notation; 10 | 11 | use Either::*; 12 | use ShortCircuit::*; 13 | 14 | /// Items used to communicate with the threads looping through 15 | /// the queues that hold the typechecker's work. Needed in order 16 | /// to discriminate between the case of "the queue doesn't have 17 | /// any work for you right now" and "the job this queue was needed 18 | /// for is complete" 19 | pub const END_MSG_ADD : QueueMsg = Right(()); 20 | pub const END_MSG_NOTATION : QueueMsg = Right(()); 21 | pub const END_MSG_CHK : QueueMsg = Right(()); 22 | 23 | 24 | pub fn foldr(f : impl Fn(A, B) -> B, i : I, init : B) -> B 25 | where I : IntoIterator, 26 | I :: IntoIter : DoubleEndedIterator { 27 | i.into_iter().rev().fold(init, |acc, next| f(next, acc)) 28 | } 29 | 30 | /// Used to try and ease some of the pain of working with long sequences. 31 | #[macro_export] 32 | macro_rules! chain { 33 | ( $( $e:expr),* ) => { 34 | { 35 | let acc = None.into_iter(); 36 | 37 | $( 38 | let acc = acc.chain(Some($e).into_iter()); 39 | )* 40 | 41 | acc 42 | } 43 | 44 | }; 45 | } 46 | 47 | /// Used to try and ease some of the pain of working with long sequences. 48 | #[macro_export] 49 | macro_rules! 
seq { 50 | ( $( $e:expr),* ) => { 51 | { 52 | let mut buf = Vec::new(); 53 | 54 | $( 55 | for elem in $e.into_iter() { 56 | buf.push(elem.to_owned()); 57 | } 58 | )* 59 | buf 60 | } 61 | 62 | }; 63 | } 64 | 65 | 66 | pub fn safe_minus_one(n : u16) -> u16 { 67 | if n == 0 { 68 | n 69 | } else { 70 | n - 1 71 | } 72 | } 73 | 74 | pub fn max3(n1 : u16, n2 : u16, n3 : u16) -> u16 { 75 | n1.max(n2).max(n3) 76 | } 77 | 78 | 79 | 80 | /// Used frequently the typechecker; we want to be able to communicate 81 | /// the following states to the observer of some return value : 82 | /// 1. These two expressions can be further reduced/inferred, but I can 83 | /// already tell you they're definitionally equal, so don't bother. 84 | /// 2. These two expressions can be further reduced/inferred, but I can 85 | /// already tell you they're NOT definitionally equal. 86 | /// 3. These need more work before I can tell whether or not they're equal. 87 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 88 | pub enum ShortCircuit { 89 | EqShort, 90 | NeqShort, 91 | Unknown, 92 | } 93 | 94 | impl ShortCircuit { 95 | } 96 | 97 | pub fn ss_forall(mut seq : impl Iterator) -> ShortCircuit { 98 | match seq.all(|elem| elem == EqShort) { 99 | true => EqShort, 100 | false => NeqShort 101 | } 102 | } 103 | 104 | pub fn ss_and(ss1 : ShortCircuit, ss2 : ShortCircuit) -> ShortCircuit { 105 | match ss1 { 106 | EqShort => ss2, 107 | NeqShort => NeqShort, 108 | Unknown => Unknown 109 | } 110 | } 111 | 112 | #[derive(Debug, Copy, Clone)] 113 | pub enum Either { 114 | Left(L), 115 | Right(R), 116 | } 117 | 118 | /// HashMap based cache; given two expressions, will tell you whether 119 | /// the TypeChecker has seen this particular pair before, and if so, 120 | /// what the result of a definitional equality comparison was. 121 | /// HashMap<(Expr, Expr), ShortCircuit> would be more intuitive, but 122 | /// would require cloning both keys on every lookup due to the memory 123 | /// layout of tuples. 
124 | #[derive(Clone)] 125 | pub struct EqCache { 126 | inner : HashMap> 127 | } 128 | 129 | impl EqCache { 130 | pub fn with_capacity(n : usize) -> Self { 131 | EqCache { 132 | inner : HashMap::with_capacity(n) 133 | } 134 | } 135 | 136 | pub fn get(&self, e1 : &Expr, e2 : &Expr) -> Option { 137 | match self.inner.get(e1) { 138 | None => match self.inner.get(e2) { 139 | Some(v) => v.iter().find(|(a, _)| a == e1).map(|(_, b)| b.clone()), 140 | None => return None 141 | }, 142 | Some(v) => { 143 | v.iter().find(|(a, _)| a == e2).map(|(_, b)| b.clone()) 144 | } 145 | } 146 | } 147 | 148 | pub fn insert(&mut self, e : Expr, ee : Expr, val : ShortCircuit) { 149 | match self.inner.get_mut(&e) { 150 | Some(v) => { 151 | v.push((ee, val)); 152 | }, 153 | None => { 154 | let mut v = Vec::with_capacity(10); 155 | v.push((ee, val)); 156 | self.inner.insert(e, v); 157 | } 158 | } 159 | } 160 | } 161 | 162 | 163 | 164 | /// Queue backed by a thread-safe VecDeque. 165 | #[derive(Debug, Clone)] 166 | pub struct RwQueue(Arc>>); 167 | 168 | impl RwQueue { 169 | pub fn with_capacity(n : usize) -> Self { 170 | let inner = VecDeque::with_capacity(n); 171 | RwQueue(Arc::new(RwLock::new(inner))) 172 | } 173 | 174 | pub fn push(&self, t : T) { 175 | match self { 176 | RwQueue(inner) => inner.write().push_back(t) 177 | } 178 | } 179 | 180 | pub fn pop(&self) -> Option { 181 | match self { 182 | RwQueue(inner) => inner.write().pop_front() 183 | } 184 | } 185 | } 186 | 187 | pub type QueueMsg = Either; 188 | 189 | pub type ModQueue = RwQueue>; 190 | pub type CompiledQueue = RwQueue>; 191 | --------------------------------------------------------------------------------