├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── README.md └── src ├── bin ├── checker.rs ├── compactor.rs └── kravanenn.rs ├── coq ├── checker │ ├── checker.rs │ ├── closure.rs │ ├── declarations.rs │ ├── environ.rs │ ├── inductive.rs │ ├── mod.rs │ ├── reduction.rs │ ├── term.rs │ └── univ.rs ├── kernel │ ├── esubst.rs │ ├── mod.rs │ └── names.rs ├── lib │ ├── c_array.rs │ ├── c_map.rs │ ├── hashcons.rs │ ├── hashset.rs │ └── mod.rs └── mod.rs ├── hopcroft ├── hopcroft.rs ├── mod.rs └── partition.rs ├── lib.rs ├── ocaml ├── compact.rs ├── de.rs ├── marshal.rs ├── mod.rs ├── values.rs └── votour.rs └── util ├── borrow.rs ├── ghost_cell.rs └── mod.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "bitflags" 3 | version = "0.7.0" 4 | source = "registry+https://github.com/rust-lang/crates.io-index" 5 | 6 | [[package]] 7 | name = "bitflags" 8 | version = "1.0.1" 9 | source = "registry+https://github.com/rust-lang/crates.io-index" 10 | 11 | [[package]] 12 | name = "byteorder" 13 | version = "0.5.3" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | 16 | [[package]] 17 | name = "coco" 18 | version = "0.1.1" 19 | source = "registry+https://github.com/rust-lang/crates.io-index" 20 | dependencies = [ 21 | "either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 22 | "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", 23 | ] 24 | 25 | [[package]] 26 | name = "crossbeam" 27 | version = "0.3.0" 28 | source = "registry+https://github.com/rust-lang/crates.io-index" 29 | 30 | [[package]] 31 | name = "cuckoo" 32 | version = "0.1.0" 33 | source = "git+https://github.com/pythonesque/libcuckoo.rs?branch=wip#340b0c5ccbdbfb7c768b535fbdd50c1d5d104d18" 34 | dependencies = [ 35 | "crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", 36 | "num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", 37 | "rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", 38 | ] 39 | 40 | [[package]] 41 | name = "either" 42 | version = "1.4.0" 43 | source = "registry+https://github.com/rust-lang/crates.io-index" 44 | 45 | [[package]] 46 | name = "fixedbitset" 47 | version = "0.1.8" 48 | source = "registry+https://github.com/rust-lang/crates.io-index" 49 | 50 | [[package]] 51 | name = "fuchsia-zircon" 52 | version = "0.2.1" 53 | source = "registry+https://github.com/rust-lang/crates.io-index" 54 | dependencies = [ 55 | "fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", 56 | ] 57 | 58 | [[package]] 59 | name = "fuchsia-zircon-sys" 60 | version = "0.2.0" 61 | source = "registry+https://github.com/rust-lang/crates.io-index" 62 | dependencies = [ 63 | "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", 64 | ] 65 | 66 | [[package]] 67 | name = "kravanenn" 68 | version = "0.0.1" 69 | dependencies = [ 70 | "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", 71 | "byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", 72 | "cuckoo 0.1.0 (git+https://github.com/pythonesque/libcuckoo.rs?branch=wip)", 73 | "fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", 74 | "lazy-init 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", 75 | 
"light_arena 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 76 | "movecell 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", 77 | "rayon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", 78 | "serde 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)", 79 | "serde_derive 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)", 80 | "serde_derive_state 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", 81 | "serde_state 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", 82 | "smallvec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", 83 | "take_mut 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", 84 | "typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", 85 | "vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", 86 | ] 87 | 88 | [[package]] 89 | name = "lazy-init" 90 | version = "0.2.0" 91 | source = "registry+https://github.com/rust-lang/crates.io-index" 92 | 93 | [[package]] 94 | name = "lazy_static" 95 | version = "0.2.11" 96 | source = "registry+https://github.com/rust-lang/crates.io-index" 97 | 98 | [[package]] 99 | name = "libc" 100 | version = "0.2.33" 101 | source = "registry+https://github.com/rust-lang/crates.io-index" 102 | 103 | [[package]] 104 | name = "light_arena" 105 | version = "0.1.1" 106 | source = "registry+https://github.com/rust-lang/crates.io-index" 107 | 108 | [[package]] 109 | name = "movecell" 110 | version = "0.2.0" 111 | source = "registry+https://github.com/rust-lang/crates.io-index" 112 | 113 | [[package]] 114 | name = "num_cpus" 115 | version = "0.2.13" 116 | source = "registry+https://github.com/rust-lang/crates.io-index" 117 | dependencies = [ 118 | "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", 119 | ] 120 | 121 | [[package]] 122 | name = "num_cpus" 123 | version = "1.7.0" 124 | source = "registry+https://github.com/rust-lang/crates.io-index" 125 | dependencies = [ 126 | "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", 127 | ] 128 | 129 | [[package]] 130 | name = "quote" 131 | version = "0.3.15" 132 | source = "registry+https://github.com/rust-lang/crates.io-index" 133 | 134 | [[package]] 135 | name = "rand" 136 | version = "0.3.18" 137 | source = "registry+https://github.com/rust-lang/crates.io-index" 138 | dependencies = [ 139 | "fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", 140 | "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", 141 | ] 142 | 143 | [[package]] 144 | name = "rayon" 145 | version = "0.9.0" 146 | source = "registry+https://github.com/rust-lang/crates.io-index" 147 | dependencies = [ 148 | "either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 149 | "rayon-core 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", 150 | ] 151 | 152 | [[package]] 153 | name = "rayon-core" 154 | version = "1.3.0" 155 | source = "registry+https://github.com/rust-lang/crates.io-index" 156 | dependencies = [ 157 | "coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 158 | "lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", 159 | "libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", 160 | "num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)", 161 | "rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", 162 | ] 163 | 164 | [[package]] 165 | name = "scopeguard" 166 | version = "0.3.3" 167 | 
source = "registry+https://github.com/rust-lang/crates.io-index" 168 | 169 | [[package]] 170 | name = "serde" 171 | version = "1.0.21" 172 | source = "registry+https://github.com/rust-lang/crates.io-index" 173 | 174 | [[package]] 175 | name = "serde_derive" 176 | version = "1.0.21" 177 | source = "registry+https://github.com/rust-lang/crates.io-index" 178 | dependencies = [ 179 | "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", 180 | "serde_derive_internals 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", 181 | "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", 182 | ] 183 | 184 | [[package]] 185 | name = "serde_derive_internals" 186 | version = "0.17.0" 187 | source = "registry+https://github.com/rust-lang/crates.io-index" 188 | dependencies = [ 189 | "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", 190 | "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", 191 | ] 192 | 193 | [[package]] 194 | name = "serde_derive_state" 195 | version = "0.4.1" 196 | source = "registry+https://github.com/rust-lang/crates.io-index" 197 | dependencies = [ 198 | "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", 199 | "serde_derive_state_internals 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 200 | "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", 201 | ] 202 | 203 | [[package]] 204 | name = "serde_derive_state_internals" 205 | version = "0.4.0" 206 | source = "registry+https://github.com/rust-lang/crates.io-index" 207 | dependencies = [ 208 | "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", 209 | "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", 210 | ] 211 | 212 | [[package]] 213 | name = "serde_state" 214 | version = "0.4.1" 215 | source = "registry+https://github.com/rust-lang/crates.io-index" 216 | dependencies = [ 217 | "serde 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)", 218 | ] 219 | 220 | [[package]] 221 | name = "smallvec" 222 | version = "0.5.0" 223 | source = "registry+https://github.com/rust-lang/crates.io-index" 224 | 225 | [[package]] 226 | name = "syn" 227 | version = "0.11.11" 228 | source = "registry+https://github.com/rust-lang/crates.io-index" 229 | dependencies = [ 230 | "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", 231 | "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)", 232 | "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", 233 | ] 234 | 235 | [[package]] 236 | name = "synom" 237 | version = "0.11.3" 238 | source = "registry+https://github.com/rust-lang/crates.io-index" 239 | dependencies = [ 240 | "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", 241 | ] 242 | 243 | [[package]] 244 | name = "take_mut" 245 | version = "0.2.0" 246 | source = "registry+https://github.com/rust-lang/crates.io-index" 247 | 248 | [[package]] 249 | name = "typed-arena" 250 | version = "1.3.0" 251 | source = "registry+https://github.com/rust-lang/crates.io-index" 252 | 253 | [[package]] 254 | name = "unicode-xid" 255 | version = "0.0.4" 256 | source = "registry+https://github.com/rust-lang/crates.io-index" 257 | 258 | [[package]] 259 | name = "vec_map" 260 | version = "0.8.0" 261 | source = "registry+https://github.com/rust-lang/crates.io-index" 262 | 263 | [metadata] 264 | "checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" 265 | "checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf" 266 | "checksum byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0fc10e8cc6b2580fda3f36eb6dc5316657f812a3df879a44a66fc9f0fdbc4855" 267 | "checksum coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c06169f5beb7e31c7c67ebf5540b8b472d23e3eade3b2ec7d1f5b504a85f91bd" 268 | "checksum crossbeam 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8837ab96533202c5b610ed44bc7f4183e7957c1c8f56e8cc78bb098593c8ba0a" 269 | "checksum cuckoo 0.1.0 (git+https://github.com/pythonesque/libcuckoo.rs?branch=wip)" = "" 270 | "checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3" 271 | "checksum fixedbitset 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "85cb8fec437468d86dc7c83ca7cfc933341d561873275f22dd5eedefa63a6478" 272 | "checksum fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c0581a4e363262e52b87f59ee2afe3415361c6ec35e665924eb08afe8ff159" 273 | "checksum fuchsia-zircon-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "43f3795b4bae048dc6123a6b972cadde2e676f9ded08aef6bb77f5f157684a82" 274 | "checksum lazy-init 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0979a1c8aaff1f4942a596f19c9a9fc85c3ee83a48644a94ca558aac9ac43773" 275 | "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" 276 | "checksum libc 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "5ba3df4dcb460b9dfbd070d41c94c19209620c191b0340b929ce748a2bcd42d2" 277 | "checksum light_arena 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a886ce2358566386500ec04e26a490e3033a5d8452747559bdaaa4955030bbc6" 278 | "checksum movecell 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "935eecf666e9e6813032d3075dd8a63c4cf1635bb499c72227853533615241f1" 279 | "checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3" 280 | "checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" 281 | "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" 282 | "checksum rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "6475140dfd8655aeb72e1fd4b7a1cc1c202be65d71669476e392fe62532b9edd" 283 | "checksum rayon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ed02d09394c94ffbdfdc755ad62a132e94c3224a8354e78a1200ced34df12edf" 284 | "checksum rayon-core 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e64b609139d83da75902f88fd6c01820046840a18471e4dfcd5ac7c0f46bea53" 285 | "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" 286 | "checksum serde 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6eda663e865517ee783b0891a3f6eb3a253e0b0dabb46418969ee9635beadd9e" 287 | "checksum serde_derive 1.0.21 
(registry+https://github.com/rust-lang/crates.io-index)" = "652bc323d694dc925829725ec6c890156d8e70ae5202919869cb00fe2eff3788" 288 | "checksum serde_derive_internals 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "32f1926285523b2db55df263d2aa4eb69ddcfa7a7eade6430323637866b513ab" 289 | "checksum serde_derive_state 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c35db53133812a6ef0882481ab7c1dd7483b75d2ce501c1da56d0847adce48fa" 290 | "checksum serde_derive_state_internals 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "61d3e53f4224a7889a0f04b488570d7889f4e8e2b6fd374b69e64124dd584bf9" 291 | "checksum serde_state 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bc342bb7ae33c04ccb11e3d329c98e212fb7d2960458840a4c4e0f70f549fda8" 292 | "checksum smallvec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "872c0ff227000041c520cca51e883b858d388ab0ecf646bab76f065cebaec025" 293 | "checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" 294 | "checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" 295 | "checksum take_mut 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50b910a1174df4aeb5738e8a0e7253883cf7801de40d094175a5a557e487f4c5" 296 | "checksum typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5934776c3ac1bea4a9d56620d6bf2d483b20d394e49581db40f187e1118ff667" 297 | "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" 298 | "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c" 299 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | 3 | name = "kravanenn" 4 | version = "0.0.1" 5 | authors = [ "Pierre-Marie Pédrot " ] 6 | 7 | [lib] 8 | name = "kravanenn" 9 | path = "src/lib.rs" 10 | 11 | [[bin]] 12 | name = "kravanenn" 13 | path = "src/bin/kravanenn.rs" 14 | doc = false 15 | 16 | [[bin]] 17 | name = "compactor" 18 | path = "src/bin/compactor.rs" 19 | doc = false 20 | 21 | [[bin]] 22 | name = "checker" 23 | path = "src/bin/checker.rs" 24 | doc = false 25 | 26 | [dependencies] 27 | byteorder = "0.5" 28 | fixedbitset = "0.1.8" 29 | serde = "1.0.16" 30 | serde_derive = "1.0.16" 31 | serde_state = "0.4.1" 32 | serde_derive_state = "0.4.1" 33 | light_arena = "0.1.1" 34 | vec_map = "0.8.0" 35 | typed-arena = "1.3.0" 36 | bitflags = "1.0.1" 37 | take_mut = "0.2.0" 38 | smallvec = "0.5.0" 39 | lazy-init = "0.2.0" 40 | rayon = "0.9.0" 41 | movecell = "0.2.0" 42 | cuckoo = { git = "https://github.com/pythonesque/libcuckoo.rs", branch = "wip" } 43 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kravanenn 2 | 3 | This is a Rust package providing a few tools for Coq. Most notably, there is currently: 4 | 5 | - A library that reimplements OCaml native serialization from the `Marshal` module in pure Rust. 6 | - A implementation of the Hopcroft algorithm tailored for the reduction of OCaml structures. 
See [this OCaml implementation](https://github.com/ppedrot/ocaml-compactor) for more details. 7 | - Stubs for browsing Coq `vo` object files 8 | -------------------------------------------------------------------------------- /src/bin/checker.rs: -------------------------------------------------------------------------------- 1 | extern crate kravanenn; 2 | extern crate serde; 3 | 4 | use std::fs::File; 5 | use std::io; 6 | use std::io::{Write, Seek, SeekFrom}; 7 | use std::str::FromStr; 8 | use kravanenn::*; 9 | use kravanenn::ocaml::values::{Opaques, LibSum, Any, UnivOpaques, Lib}; 10 | use kravanenn::coq::checker::checker::{LoadPath}; 11 | 12 | fn main () { 13 | let args : Vec = std::env::args().collect(); 14 | let lp = LoadPath::new(); 15 | kravanenn::coq::checker::checker::check(&lp, args.as_ref()); 16 | } 17 | -------------------------------------------------------------------------------- /src/bin/compactor.rs: -------------------------------------------------------------------------------- 1 | extern crate kravanenn; 2 | 3 | use std::fs::File; 4 | use std::io::{Seek, SeekFrom}; 5 | use kravanenn::ocaml::marshal; 6 | use kravanenn::ocaml::compact; 7 | 8 | macro_rules! try_fatal { 9 | ($e:expr) => { 10 | { 11 | match $e { 12 | Err (e) => { 13 | println!("Fatal error: {}", e); 14 | return (); 15 | }, 16 | Ok (ans) => ans, 17 | } 18 | } 19 | }; 20 | } 21 | 22 | fn main () { 23 | let args : Vec = std::env::args().collect(); 24 | match args.len() { 25 | 3 => (), 26 | _ => { println!("Usage: compact FILE OUTPUT"); return; }, 27 | } 28 | println!("Reading file {}...", args[1]); 29 | let mut file = try_fatal!(File::open(&args[1])); 30 | let segments = try_fatal!(marshal::read_file_summary(&mut file)); 31 | println!("Found {} segments.", segments.len()); 32 | // Back to the first segment, skipping the magic number 33 | let _ = try_fatal!(file.seek(SeekFrom::Start(4))); 34 | for i in 0..segments.len() { 35 | println!("Reading segment {}", i); 36 | let (_, seg) = try_fatal!(marshal::read_segment(&mut file)); 37 | let _ = compact::reduce(&seg); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/bin/kravanenn.rs: -------------------------------------------------------------------------------- 1 | extern crate kravanenn; 2 | extern crate serde; 3 | 4 | use std::fs::File; 5 | use std::io; 6 | use std::io::{Write, Seek, SeekFrom}; 7 | use std::str::FromStr; 8 | use kravanenn::*; 9 | use kravanenn::ocaml::values::{Opaques, LibSum, Any, UnivOpaques, Lib}; 10 | 11 | macro_rules! try_fatal { 12 | ($e:expr) => { 13 | { 14 | match $e { 15 | Err (e) => { 16 | println!("Fatal error: {}", e); 17 | return (); 18 | }, 19 | Ok (ans) => ans, 20 | } 21 | } 22 | }; 23 | } 24 | 25 | fn main () { 26 | let args : Vec = std::env::args().collect(); 27 | if args.len() != 2 { panic!("Invalid argument"); }; 28 | println!("Reading file {}...", args[1]); 29 | let mut file = match File::open(&args[1]) { 30 | Err (e) => { 31 | println!("Fatal error: {}", e); 32 | return (); 33 | }, 34 | Ok (f) => f, 35 | }; 36 | let segments = ocaml::marshal::read_file_summary(&mut file).unwrap(); 37 | println!("Found {} segments. 
Choose the one to visit.", segments.len()); 38 | let mut n : usize = 0; 39 | for header in segments.iter() { 40 | println!("{}: {}w", n, header.size64); 41 | n = n + 1; 42 | }; 43 | let mut buf = String::new(); 44 | let n; 45 | loop { 46 | print!("# "); 47 | let _ = io::stdout().flush(); 48 | let () = buf.clear(); 49 | let () = match io::stdin().read_line(&mut buf) { 50 | Ok (0) => return (), 51 | Ok (..) => (), 52 | Err (..) => return (), 53 | }; 54 | // Remove the EOL 55 | let _ = buf.pop(); 56 | if let Ok(n_) = usize::from_str(&mut buf) { 57 | if segments.get(n_).is_some() { 58 | n = n_; 59 | break 60 | } 61 | } 62 | println!("No such segment."); 63 | } 64 | println!("Reading segment n°{}...", n); 65 | let _ = try_fatal!(file.seek(SeekFrom::Start(4))); 66 | for _ in 0..n { 67 | ocaml::marshal::read_segment(&mut file).unwrap(); 68 | } 69 | let (_, ref obj) = try_fatal!(ocaml::marshal::read_segment(&mut file)); 70 | let mut seed = ocaml::de::Seed::new(&obj.memory); 71 | let sd : Result = ocaml::de::from_obj_state(obj, &mut seed); 72 | seed = ocaml::de::Seed::new(&obj.memory); 73 | let md : Result = ocaml::de::from_obj_state(obj, &mut seed); 74 | seed = ocaml::de::Seed::new(&obj.memory); 75 | let opaque_csts : Result = ocaml::de::from_obj_state(obj, &mut seed); 76 | seed = ocaml::de::Seed::new(&obj.memory); 77 | let discharging : Result, _> = ocaml::de::from_obj_state(obj, &mut seed); 78 | seed = ocaml::de::Seed::new(&obj.memory); 79 | let tasks : Result, _> = ocaml::de::from_obj_state(obj, &mut seed); 80 | seed = ocaml::de::Seed::new(&obj.memory); 81 | let table : Result = ocaml::de::from_obj_state(obj, &mut seed); 82 | drop(seed); 83 | println!("sd: {:?}", sd.is_ok()/*, format!("{:?}", sd).len()*/); 84 | println!("md: {:?}", md.is_ok()/*, format!("{:?}", md).len()*/); 85 | println!("opaque_csts: {:?}", opaque_csts.is_ok()/*, format!("{:?}", opaque_csts).len()*/); 86 | println!("discharging: {:?}", if let Ok(None) = discharging { true } else { false }); 87 | println!("tasks: {:?}", if let Ok(None) = tasks { true } else { false }); 88 | println!("table: {:?}", table.is_ok()/*, format!("{:?}", table).len()*/); 89 | let ocaml::marshal::Memory(ref mem) = obj.memory; 90 | ocaml::votour::visit_object(obj.entry, mem); 91 | } 92 | -------------------------------------------------------------------------------- /src/coq/checker/checker.rs: -------------------------------------------------------------------------------- 1 | extern crate serde; 2 | 3 | use std::str; 4 | use std::rc::Rc; 5 | use std::sync::Arc; 6 | use std::collections::{HashMap}; 7 | use std::collections::hash_map::{Entry}; 8 | use std::fs::File; 9 | use std::io; 10 | use std::io::{Write, Seek, SeekFrom}; 11 | use std::str::FromStr; 12 | use std::path::{PathBuf, Path}; 13 | use ocaml::values::{Opaques, LibSum, Any, UnivOpaques, Lib, Id, Dp, List}; 14 | use ocaml::de::{ORef, Str}; 15 | 16 | macro_rules! try_fatal { 17 | ($e:expr) => { 18 | { 19 | match $e { 20 | Err (e) => { 21 | println!("Fatal error: {}", e); 22 | return (); 23 | }, 24 | Ok (ans) => ans, 25 | } 26 | } 27 | }; 28 | } 29 | 30 | pub struct Error; 31 | 32 | pub struct LoadPath { 33 | f_map : HashMap, 34 | r_map : HashMap>, 35 | } 36 | 37 | fn split_dirpath(dp : &Dp) -> (&Id, &Dp) { 38 | // Dirpaths are ordered by decreasing order of generality, so the first node 39 | // of a module path is necessarily the name of the module. 
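    // For example, a library whose logical name is A.B.C is represented here as the
    // list [C; B; A], so `split_dirpath` returns the module name C together with the
    // remaining prefix [B; A] (cf. `locate_physical` below, which prepends the file stem).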
40 | match dp { 41 | &List::Nil => panic!("Invalid library name"), 42 | &List::Cons(ORef(ref r1)) => { 43 | let (ref md, ref lib) = **r1; 44 | (md, lib) 45 | }, 46 | } 47 | } 48 | 49 | fn is_in_path(lib : &Dp, path : &Dp) -> bool { 50 | let (_, mut lib) = split_dirpath(lib); 51 | let mut path = path; 52 | loop { 53 | match (lib, path) { 54 | (&List::Nil, &List::Nil) => return true, 55 | (&List::Cons(ORef(ref r1)), &List::Cons(ORef(ref r2))) => { 56 | let (ref id1, ref nlib) = **r1; 57 | let (ref id2, ref npath) = **r2; 58 | if id1 == id2 { lib = nlib; path = npath } 59 | else { return false } 60 | }, 61 | (_, _) => return false, 62 | } 63 | } 64 | } 65 | 66 | impl LoadPath { 67 | 68 | pub fn new() -> Self { 69 | LoadPath { 70 | f_map : HashMap::new (), 71 | r_map : HashMap::new (), 72 | } 73 | } 74 | 75 | pub fn add(&mut self, s : PathBuf, dp : Dp) -> io::Result<()> { 76 | let s = try!(s.canonicalize()); 77 | match self.f_map.insert(s.clone(), dp.clone()) { 78 | None => (), 79 | Some(_) => (), // TODO: warning 80 | }; 81 | match self.r_map.entry(dp) { 82 | Entry::Occupied(ref mut e) => e.get_mut().push(s), 83 | Entry::Vacant(e) => { let _ = e.insert(vec!(s)); } , 84 | }; 85 | Ok(()) 86 | } 87 | 88 | // Associate a logical path to some physical path. 89 | fn locate_physical(&self, s : &Path) -> Option { 90 | let path = match s.parent() { 91 | None => return None, 92 | Some(s) => s, 93 | }; 94 | let name = match s.file_stem() { 95 | None => return None, 96 | Some(n) => 97 | match n.to_str() { 98 | None => return None, 99 | Some(n) => n, 100 | }, 101 | }; 102 | let lp = match self.f_map.get(path) { 103 | None => return None, 104 | Some(lp) => lp, 105 | }; 106 | let name = Str(Arc::new(String::from(name.clone()).into_bytes())); 107 | Some(List::Cons(ORef(Arc::new((name, lp.clone()))))) 108 | } 109 | 110 | // Associate a physical vo file to a logical path 111 | fn locate_logical(&self, dp : &Dp) -> Option { 112 | let def = []; 113 | let (md, path) = split_dirpath(dp); 114 | // TODO: use UTF8 strings as identifiers 115 | let mut md = String::from_utf8((**md).clone()).unwrap(); 116 | md.push_str(".vo"); 117 | let md = Path::new(&md); 118 | let dirs = match self.r_map.get(path) { 119 | None => def.as_ref(), 120 | Some(v) => v.as_ref(), 121 | }; 122 | for dir in dirs { 123 | let mut f = dir.clone(); 124 | // No need to canonicalize, dir should already be in canonical form. 
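    // (Directories stored in r_map were canonicalized by `add` before insertion.)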
125 | f.push(Path::new(md)); 126 | if f.is_file() { return Some(f); } 127 | } 128 | None 129 | } 130 | 131 | } 132 | 133 | fn load_file(f : &str) -> Result <(), Error> { 134 | // println!("Reading file {}...", args[1]); 135 | // let mut file = match File::open(&args[1]) { 136 | // Err (e) => { 137 | // println!("Fatal error: {}", e); 138 | // return (); 139 | // }, 140 | // Ok (f) => f, 141 | // }; 142 | // let () = try_fatal!(ocaml::marshal::read_magic(&mut file)); 143 | // // First segment: object file summary 144 | // let (_, ref obj) = try_fatal!(ocaml::marshal::read_segment(&mut file)); 145 | // let mut seed = ocaml::de::Seed::new(&obj.memory); 146 | // let sd : LibSum = try_fatal!(ocaml::de::from_obj_state(obj, &mut seed)); 147 | // // Second segment: library itself 148 | // let (_, ref obj) = try_fatal!(ocaml::marshal::read_segment(&mut file)); 149 | // let mut seed = ocaml::de::Seed::new(&obj.memory); 150 | // let md : Lib = try_fatal!(ocaml::de::from_obj_state(obj, &mut seed)); 151 | // // Third, fourth and fifth segments don't matter for checker 152 | // let () = try_fatal!(ocaml::marshal::skip_segment(&mut file)); 153 | // let () = try_fatal!(ocaml::marshal::skip_segment(&mut file)); 154 | // let () = try_fatal!(ocaml::marshal::skip_segment(&mut file)); 155 | // // Sixth segment: opaque table 156 | // let mut seed = ocaml::de::Seed::new(&obj.memory); 157 | // let table : Opaques = try_fatal!(ocaml::de::from_obj_state(obj, &mut seed)); 158 | // 159 | unimplemented!("yay"); 160 | } 161 | 162 | pub fn check (paths : &LoadPath, files : &[String]) -> Result<(), Error> { 163 | Ok(()) 164 | } 165 | -------------------------------------------------------------------------------- /src/coq/checker/declarations.rs: -------------------------------------------------------------------------------- 1 | use ocaml::de::{ 2 | ORef, 3 | }; 4 | use ocaml::values::{ 5 | Constr, 6 | Mp, 7 | MpResolver, 8 | Substituted, 9 | UId, 10 | }; 11 | use std::collections::{HashMap}; 12 | 13 | struct UMap<'b>(HashMap, HashMap<&'b UId, &'b MpResolver>); 14 | 15 | impl<'b> UMap<'b> { 16 | pub fn mps<'c>(&mut self, _c: &'c mut ORef) -> Constr { 17 | unimplemented!("mp substitution not yet implemented") 18 | } 19 | } 20 | 21 | impl Substituted> { 22 | fn force<'b, F>(&mut self, _fsubst: F) 23 | where F: for<'c> FnOnce(&mut UMap<'b>, &'c mut ORef) -> T, 24 | T: Clone, 25 | { 26 | let Substituted { ref mut subst, value: ref mut _value } = *self; 27 | if subst.len() != 0 { 28 | unimplemented!("Module substitutions are yet implemented") 29 | } 30 | } 31 | } 32 | 33 | impl Substituted> { 34 | pub fn force_constr(&mut self) { 35 | self.force(UMap::mps) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/coq/checker/environ.rs: -------------------------------------------------------------------------------- 1 | use coq::checker::univ::{ 2 | Huniv, 3 | SubstError, 4 | Universes, 5 | }; 6 | use coq::kernel::names::{ 7 | CMapEnv, 8 | KnKey, 9 | KnMap, 10 | KnUser, 11 | MindMapEnv, 12 | // MpMap, 13 | MutInd, 14 | }; 15 | use ocaml::values::{ 16 | Cb, 17 | Constr, 18 | Cst, 19 | CstDef, 20 | Engagement, 21 | Ind, 22 | IndPack, 23 | Kn, 24 | // ModType, 25 | // Module, 26 | ProjBody, 27 | PUniverses, 28 | // Rctxt, 29 | RDecl, 30 | // VoDigest, 31 | }; 32 | use std::borrow::Cow; 33 | 34 | /// Environments 35 | 36 | #[derive(Default)] 37 | pub struct Globals<'g> { 38 | constants: CMapEnv<'g, &'g Cb>, 39 | inductives: MindMapEnv<'g, &'g IndPack>, 40 | inductives_eq: KnMap<'g, 
Kn>, 41 | /// Hash-consed universe table. 42 | univ_hcons_tbl: Huniv, 43 | // modules: MpMap, 44 | // modtypes: MpMap, 45 | } 46 | 47 | pub struct Stratification { 48 | universes: Universes, 49 | enga: Engagement, 50 | } 51 | 52 | pub struct Env<'b, 'g> where 'g: 'b { 53 | /// Will let us easily keep the globals the same (without copying) while recreating the 54 | /// rel_context. We want to divorce the rel_context lifetimes from the global lifetimes 55 | /// so we can drop the Env without unifying the lifetime of the globals with it. 56 | pub globals: &'b mut Globals<'g>, 57 | /// NOTE: We will probably make this something we clone somewhat regularly, since we often 58 | /// want to keep the rest of the Env the same but mutate the Rctxt. So we might make this 59 | /// into a &'r mut Rctx<'b> or something. 60 | /// NOTE: We currently use Vec instead of RCtxt, just because it's somewhat easier 61 | /// to deal with. We can always change it later. 62 | pub rel_context: &'b mut Vec, 63 | pub stratification: Stratification, 64 | // imports: MpMap, 65 | } 66 | 67 | #[derive(Clone, Copy, Debug, Eq, PartialEq)] 68 | pub enum ConstEvaluationResult { 69 | NoBody, 70 | Opaque, 71 | Subst(SubstError), 72 | } 73 | 74 | #[derive(Clone, Debug, Eq, PartialEq)] 75 | pub enum EnvError { 76 | Anomaly(String), 77 | NotEvaluableConst(ConstEvaluationResult), 78 | } 79 | 80 | pub type EnvResult = Result; 81 | 82 | impl ::std::convert::From for ConstEvaluationResult { 83 | fn from(e: SubstError) -> Self { 84 | ConstEvaluationResult::Subst(e) 85 | } 86 | } 87 | 88 | impl<'g> Globals<'g> where { 89 | /// Constants 90 | 91 | /// Global constants 92 | pub fn lookup_constant(&self, c: &Cst) -> Option<&'g Cb> { 93 | self.constants.get(&KnUser(c)).map( |&cb| cb ) 94 | } 95 | 96 | pub fn constant_value(&self, o: &PUniverses) -> 97 | Option, ConstEvaluationResult>> 98 | { 99 | let PUniverses(ref kn, ref u) = *o; 100 | self.lookup_constant(kn) 101 | .and_then( |cb| { 102 | Some(match cb.body { 103 | CstDef::Def(ref l_body) => { 104 | // l_body is lazily initialized, and this is the only place that tries to 105 | // force it. 106 | let b = l_body.get_or_create( |mut l_body| { 107 | l_body.force_constr(); 108 | if cb.polymorphic { 109 | // FIXME: Why do we do this twice? 110 | l_body.force_constr(); 111 | } 112 | l_body.value 113 | }); 114 | if cb.polymorphic { 115 | match b.subst_instance(u, &self.univ_hcons_tbl) { 116 | Ok(b) => Ok(b), 117 | Err(e) => Err(ConstEvaluationResult::Subst(e)), 118 | } 119 | } else { 120 | Ok(Cow::Borrowed(&**b)) 121 | } 122 | }, 123 | CstDef::OpaqueDef(_) => 124 | Err(ConstEvaluationResult::NoBody), 125 | CstDef::Undef(_) => 126 | Err(ConstEvaluationResult::Opaque), 127 | }) 128 | }) 129 | } 130 | 131 | pub fn lookup_projection(&self, p: &Cst) -> Option> { 132 | // NOTE: Altered from OCaml implementation to not require p to be a Proj, since sometimes 133 | // we only have a constant (for instance, when checking a projection invented for eta 134 | // expansion of primitive records). 
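    // A missing constant yields None, while a constant that exists but has no
    // `proj` field yields Some(Err(..)), so callers can tell the two cases apart.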
135 | self.lookup_constant(&p) 136 | .map( |p| p.proj.as_ref().ok_or_else( || { 137 | let e = "lookup_projection: constant is not a projection"; 138 | EnvError::Anomaly(e.into()) 139 | })) 140 | } 141 | 142 | /// Inductives 143 | 144 | /// Mutual Inductives 145 | fn scrape_mind<'a>(&'a self, kn: &'a Kn) -> &'a Kn { 146 | self.inductives_eq.get(&KnKey(kn)).unwrap_or(kn) 147 | } 148 | 149 | pub fn mind_equiv(&self, ind1: &Ind, ind2: &Ind) -> bool { 150 | ind1.pos == ind2.pos && 151 | self.scrape_mind(ind1.name.user()).equal(self.scrape_mind(ind2.name.user())) 152 | } 153 | 154 | pub fn lookup_mind(&self, kn: &MutInd) -> Option<&'g IndPack> 155 | { 156 | self.inductives.get(&KnUser(kn)).map( |&v| v ) 157 | } 158 | } 159 | 160 | impl Stratification { 161 | pub fn universes(&self) -> &Universes { 162 | &self.universes 163 | } 164 | 165 | pub fn engagement(&self) -> &Engagement { 166 | &self.enga 167 | } 168 | } 169 | 170 | impl<'b, 'g> Env<'b, 'g> { 171 | pub fn push_rel(&mut self, d: RDecl) { 172 | self.rel_context.push(d); 173 | } 174 | } 175 | -------------------------------------------------------------------------------- /src/coq/checker/inductive.rs: -------------------------------------------------------------------------------- 1 | use coq::checker::environ::{ 2 | Env, 3 | }; 4 | use coq::checker::reduction::{ 5 | ConvResult, 6 | }; 7 | use ocaml::values::{ 8 | Constr, 9 | Ind, 10 | PUniverses, 11 | }; 12 | 13 | /// Extracting an inductive type from a construction 14 | 15 | impl Constr { 16 | /// This API is weird; it mutates self in place. This is done in order to allow the argument 17 | /// vector returned by find_rectype to point inside of self. We could avoid this in various 18 | /// ways (including not allocating a vector at all) but the best solutions probably look more 19 | /// like this, so we just bite the bullet. 20 | /// 21 | /// Returns None if this does not reduce to an application of an inductive type. 22 | /// 23 | /// self should be typechecked beforehand! 24 | pub fn find_rectype(&mut self, env: &mut Env) -> 25 | ConvResult, Vec<&Constr>)>> 26 | { 27 | // TODO: If everything applied to reverse-order arg lists, we could use a more efficient 28 | // method here and use an iterator instead of allocating a Vec. 
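    // For example, if `self` reduces to an inductive applied to arguments,
    // `I a1 ... an`, the result is `Some((I, [a1, ..., an]))` (with `I` carrying
    // its universe instance); any other head yields `None`.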
29 | self.whd_all(env)?; 30 | let (t, l) = self.decompose_app(); 31 | Ok(match *t { 32 | Constr::Ind(ref o) => Some((&**o, l)), 33 | _ => None 34 | }) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/coq/checker/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod closure; 2 | pub mod declarations; 3 | pub mod environ; 4 | pub mod inductive; 5 | pub mod reduction; 6 | pub mod term; 7 | pub mod univ; 8 | pub mod checker; 9 | -------------------------------------------------------------------------------- /src/coq/checker/term.rs: -------------------------------------------------------------------------------- 1 | use coq::checker::univ::{ 2 | Huniv, 3 | SubstResult, 4 | }; 5 | use coq::kernel::esubst::{Idx, IdxError, Lift, IdxResult}; 6 | use core::convert::TryFrom; 7 | use core::nonzero::NonZero; 8 | use ocaml::de::{ORef, Array}; 9 | use ocaml::values::{ 10 | CoFix, 11 | Constr, 12 | Cst, 13 | Fix, 14 | Fix2, 15 | Ind, 16 | Instance, 17 | Name, 18 | PRec, 19 | PUniverses, 20 | RDecl, 21 | Sort, 22 | SortContents, 23 | Univ, 24 | }; 25 | use std::borrow::{Cow}; 26 | use std::cell::Cell; 27 | use std::option::{NoneError}; 28 | use std::sync::{Arc}; 29 | 30 | #[derive(Clone,Copy)] 31 | pub enum Info { 32 | Closed, 33 | Open, 34 | Unknown, 35 | } 36 | 37 | /// Exception raised if a variable lookup is out of range. 38 | pub enum SubstError { 39 | LocalOccur, 40 | Idx(IdxError), 41 | } 42 | 43 | pub struct Substituend { 44 | info: Cell, 45 | it: A, 46 | } 47 | 48 | pub type Arity = (Vec, ORef); 49 | 50 | impl Substituend { 51 | pub fn make(c: A) -> Self { 52 | Substituend { 53 | info: Cell::new(Info::Unknown), 54 | it: c, 55 | } 56 | } 57 | } 58 | 59 | impl ::std::convert::From for SubstError { 60 | fn from(e: IdxError) -> Self { 61 | SubstError::Idx(e) 62 | } 63 | } 64 | 65 | impl<'a> Substituend<&'a Constr> { 66 | /// 1st : general case 67 | pub fn lift(&self, depth: Idx) -> IdxResult { 68 | match self.info.get() { 69 | Info::Closed => Ok(self.it.clone()), 70 | Info::Open => self.it.lift(depth), 71 | Info::Unknown => { 72 | self.info.set(if self.it.closed0()? { Info::Closed } else { Info::Open }); 73 | // Recursion is okay here since it can only loop once. 74 | self.lift(depth) 75 | }, 76 | } 77 | } 78 | } 79 | 80 | impl Univ { 81 | fn sort_of(&self) -> Sort { 82 | if self.is_type0m() { Sort::Prop(SortContents::Null) } 83 | else if self.is_type0() { Sort::Prop(SortContents::Pos) } 84 | else { Sort::Type(ORef(Arc::new(self.clone()))) } 85 | } 86 | } 87 | 88 | impl Constr { 89 | /// Constructions as implemented 90 | 91 | pub fn strip_outer_cast(&self) -> &Self { 92 | let mut c = self; 93 | while let Constr::Cast(ref o) = *c { 94 | let (ref c_, _, _) = **o; 95 | c = c_; 96 | } 97 | c 98 | } 99 | 100 | /// Warning: returned argument order is reversed from the OCaml implementation! 101 | /// 102 | /// We could also consider a VecDeque, but we only ever append one way so it seems like a 103 | /// waste... 104 | pub fn collapse_appl<'a>(&'a self, cl: &'a [Self]) -> (&Self, Vec<&Self>) { 105 | // FIXME: Consider a non-allocating version that works as an intrusive iterator, or a 106 | // reversed iterator; both would suffice for our purposes here. 107 | let mut f = self; 108 | // Argument order is reversed, so extending to the right is prepending. 
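    // (`decompose_app` below reverses the vector returned here before handing it
    // back to callers.)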
109 | let mut cl2: Vec<&Self> = cl.iter().collect(); 110 | while let Constr::App(ref o) = *f.strip_outer_cast() { 111 | let (ref g, ref cl1) = **o; 112 | f = g; 113 | cl2.extend(cl1.iter().rev()); 114 | } 115 | (f, cl2) 116 | } 117 | 118 | /// This method has arguments in the same order as the OCaml. 119 | pub fn decompose_app(&self) -> (&Self, Vec<&Self>) { 120 | if let Constr::App(ref o) = *self { 121 | let (ref f, ref cl) = **o; 122 | let (f, mut cl) = f.collapse_appl(cl); 123 | cl.reverse(); 124 | (f, cl) 125 | } else { 126 | (self, Vec::new()) 127 | } 128 | } 129 | 130 | pub fn applist(self, l: Vec) -> Constr { 131 | Constr::App(ORef(Arc::from((self, Array(Arc::from(l)))))) 132 | } 133 | 134 | /// Functions for dealing with Constr terms 135 | 136 | /// Occurring 137 | 138 | pub fn iter_with_binders(&self, g: G, f: F, l: &T) -> Result<(), E> 139 | where 140 | T: Clone, 141 | G: Fn(&mut T) -> Result<(), E>, 142 | F: Fn(&Constr, &T) -> Result<(), E>, 143 | { 144 | Ok(match *self { 145 | Constr::Rel(_) | Constr::Sort(_) | Constr::Const(_) | Constr::Ind(_) 146 | | Constr::Construct(_) => (), 147 | Constr::Cast(ref o) => { 148 | let (ref c, _, ref t) = **o; 149 | f(c, l)?; 150 | f(t, l)?; 151 | }, 152 | Constr::Prod(ref o) => { 153 | let (_, ref t, ref c) = **o; 154 | f(t, l)?; 155 | let mut l = l.clone(); // expensive 156 | g(&mut l)?; 157 | f(c, &l)?; 158 | }, 159 | Constr::Lambda(ref o) => { 160 | let (_, ref t, ref c) = **o; 161 | f(t, l)?; 162 | let mut l = l.clone(); // expensive 163 | g(&mut l)?; 164 | f(c, &l)?; 165 | }, 166 | Constr::LetIn(ref o) => { 167 | let (_, ref b, ref t, ref c) = **o; 168 | f(b, l)?; 169 | f(t, l)?; 170 | let mut l = l.clone(); // expensive 171 | g(&mut l)?; 172 | f(c, &l)?; 173 | }, 174 | Constr::App(ref o) => { 175 | let (ref c, ref al) = **o; 176 | f(c, l)?; 177 | for x in al.iter() { 178 | f(x, l)?; 179 | } 180 | }, 181 | // | Evar (e,al) -> Array.iter (f n) l, 182 | Constr::Case(ref o) => { 183 | let (_, ref p, ref c, ref bl) = **o; 184 | f(p, l)?; 185 | f(c, l)?; 186 | for x in bl.iter() { 187 | f(x, l)?; 188 | } 189 | }, 190 | Constr::Fix(ref o) => { 191 | let Fix(_, PRec(_, ref tl, ref bl)) = **o; 192 | let len = tl.len(); 193 | for x in tl.iter() { 194 | f(x, l)?; 195 | } 196 | let mut l = l.clone(); // expensive 197 | for _ in 0..len { 198 | g(&mut l)?; 199 | } 200 | for x in bl.iter() { 201 | f(x, &l)?; 202 | } 203 | }, 204 | Constr::CoFix(ref o) => { 205 | let CoFix(_, PRec(_, ref tl, ref bl)) = **o; 206 | let len = tl.len(); 207 | for x in tl.iter() { 208 | f(x, l)?; 209 | } 210 | let mut l = l.clone(); // expensive 211 | for _ in 0..len { 212 | g(&mut l)?; 213 | } 214 | for x in bl.iter() { 215 | f(x, &l)?; 216 | } 217 | }, 218 | Constr::Proj(ref o) => { 219 | let (_, ref c) = **o; 220 | f(c, l)?; 221 | }, 222 | // Constr::Meta(_) | Constr::Var(_) | Constr::Evar(_) => unreachable!("") 223 | }) 224 | } 225 | 226 | 227 | /// (closedn n M) raises LocalOccur if a variable of height greater than n 228 | /// occurs in M, returns () otherwise 229 | fn closed_rec(&self, n: &i64) -> Result<(), SubstError> { 230 | match *self { 231 | Constr::Rel(m) if m > *n => Err(SubstError::LocalOccur), 232 | _ => self.iter_with_binders(|i| { 233 | *i = i.checked_add(1).ok_or(SubstError::Idx(IdxError::from(NoneError)))?; 234 | return Ok(()) 235 | }, Self::closed_rec, n), 236 | } 237 | } 238 | 239 | pub fn closedn(&self, n: i64) -> IdxResult { 240 | match self.closed_rec(&n) { 241 | Ok(()) => Ok(true), 242 | Err(SubstError::LocalOccur) => Ok(false), 243 | 
Err(SubstError::Idx(e)) => Err(e), 244 | } 245 | } 246 | 247 | /// [closed0 M] is true iff [M] is a (deBruijn) closed term 248 | pub fn closed0(&self) -> IdxResult { 249 | self.closedn(0) 250 | } 251 | 252 | /// Lifting 253 | pub fn map_constr_with_binders(&self, g: G, f: F, l: &T) -> Result 254 | where 255 | T: Clone, 256 | G: Fn(&mut T) -> Result<(), E>, 257 | F: Fn(&Constr, &T) -> Result, 258 | { 259 | Ok(match *self { 260 | Constr::Rel(_) | Constr::Sort(_) | Constr::Const(_) | Constr::Ind(_) 261 | | Constr::Construct(_) => self.clone(), 262 | Constr::Cast(ref o) => { 263 | let (ref c, k, ref t) = **o; 264 | let c = f(c, l)?; 265 | let t = f(t, l)?; 266 | Constr::Cast(ORef(Arc::from((c, k, t)))) 267 | }, 268 | Constr::Prod(ref o) => { 269 | let (ref na, ref t, ref c) = **o; 270 | let t = f(t, l)?; 271 | let mut l = l.clone(); // expensive 272 | g(&mut l)?; 273 | let c = f(c, &l)?; 274 | Constr::Prod(ORef(Arc::from((na.clone(), t, c)))) 275 | }, 276 | Constr::Lambda(ref o) => { 277 | let (ref na, ref t, ref c) = **o; 278 | let t = f(t, l)?; 279 | let mut l = l.clone(); // expensive 280 | g(&mut l)?; 281 | let c = f(c, &l)?; 282 | Constr::Lambda(ORef(Arc::from((na.clone(), t, c)))) 283 | }, 284 | Constr::LetIn(ref o) => { 285 | let (ref na, ref b, ref t, ref c) = **o; 286 | let b = f(b, l)?; 287 | let t = f(t, l)?; 288 | let mut l = l.clone(); // expensive 289 | g(&mut l)?; 290 | let c = f(c, &l)?; 291 | Constr::LetIn(ORef(Arc::from((na.clone(), b, t, c)))) 292 | }, 293 | Constr::App(ref o) => { 294 | let (ref c, ref al) = **o; 295 | let c = f(c, l)?; 296 | // expensive -- allocates a Vec 297 | let al: Result, _> = al.iter().map( |x| f(x, l) ).collect(); 298 | Constr::App(ORef(Arc::from((c, Array(Arc::from(al?)))))) 299 | }, 300 | // | Evar (e,al) -> Evar (e, Array.map (f l) al) 301 | Constr::Case(ref o) => { 302 | let (ref ci, ref p, ref c, ref bl) = **o; 303 | let p = f(p, l)?; 304 | let c = f(c, l)?; 305 | // expensive -- allocates a Vec 306 | let bl: Result, _> = bl.iter().map( |x| f(x, l) ).collect(); 307 | Constr::Case(ORef(Arc::from((ci.clone(), p, c, Array(Arc::from(bl?)))))) 308 | }, 309 | Constr::Fix(ref o) => { 310 | let Fix(ref ln, PRec(ref lna, ref tl, ref bl)) = **o; 311 | let len = tl.len(); 312 | // expensive -- allocates a Vec 313 | let tl: Result, _> = tl.iter().map( |x| f(x, l) ).collect(); 314 | let mut l = l.clone(); // expensive 315 | for _ in 0..len { 316 | g(&mut l)?; 317 | } 318 | // expensive -- allocates a Vec 319 | let bl: Result, _> = bl.iter().map( |x| f(x, &l) ).collect(); 320 | Constr::Fix(ORef(Arc::from(Fix(ln.clone(), 321 | PRec(lna.clone(), 322 | Array(Arc::from(tl?)), 323 | Array(Arc::from(bl?))))))) 324 | }, 325 | Constr::CoFix(ref o) => { 326 | let CoFix(ln, PRec(ref lna, ref tl, ref bl)) = **o; 327 | let len = tl.len(); 328 | // expensive -- allocates a Vec 329 | let tl: Result, _> = tl.iter().map( |x| f(x, l) ).collect(); 330 | let mut l = l.clone(); // expensive 331 | for _ in 0..len { 332 | g(&mut l)?; 333 | } 334 | // expensive -- allocates a Vec 335 | let bl: Result, _> = bl.iter().map( |x| f(x, &l) ).collect(); 336 | Constr::CoFix(ORef(Arc::from(CoFix(ln.clone(), 337 | PRec(lna.clone(), 338 | Array(Arc::from(tl?)), 339 | Array(Arc::from(bl?))))))) 340 | }, 341 | Constr::Proj(ref o) => { 342 | let (ref p, ref c) = **o; 343 | let c = f(c, l)?; 344 | Constr::Proj(ORef(Arc::from((p.clone(), c)))) 345 | }, 346 | // Constr::Meta(_) | Constr::Var(_) | Constr::Evar(_) => unreachable!("") 347 | }) 348 | } 349 | 350 | /// The generic lifting function 351 
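/// `exliftn` pushes the explicit lift `el` through the term, relocating each free
/// `Rel` via `reloc_rel`; binders crossed along the way are handled by `Lift::lift`
/// through `map_constr_with_binders`. `liftn k n` below then shifts by `k` the
/// indices free at depth `n` and above, and `lift k` is just `liftn k 1`.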
| pub fn exliftn(&self, el: &Lift) -> IdxResult { 352 | match *self { 353 | Constr::Rel(i) => 354 | Ok(Constr::Rel(i32::from(el.reloc_rel(Idx::new(NonZero::new(i)?)?)?) as i64)), 355 | _ => self.map_constr_with_binders(Lift::lift, Self::exliftn, el) 356 | } 357 | } 358 | 359 | /// Lifting the binding depth across k bindings 360 | 361 | pub fn liftn(&self, k: Idx, n: Idx) -> IdxResult { 362 | let mut el = Lift::id(); 363 | el.shift(k)?; 364 | if let Some(n) = n.checked_sub(Idx::ONE).expect("n > 0 - 1 ≥ 0") { 365 | el.liftn(n)?; 366 | } 367 | if el.is_id() { 368 | Ok(self.clone()) 369 | } else { 370 | self.exliftn(&el) 371 | } 372 | } 373 | 374 | pub fn lift(&self, k: Idx) -> IdxResult { 375 | self.liftn(k, Idx::ONE) 376 | } 377 | 378 | /// Substituting 379 | 380 | fn substrec(&self, 381 | &(depth, ref lamv): &(Option, &[Substituend<&Constr>])) -> IdxResult { 382 | match *self { 383 | Constr::Rel(k_) => { 384 | // FIXME: For below, ensure u32 to usize is always a valid cast. 385 | let d = depth.map(u32::from).unwrap_or(0) as usize; 386 | // NOTE: This can only fail if we compile with addresses under 64 bits. 387 | let k = usize::try_from(k_)?; 388 | // After the above, we know k is a valid non-negative i64. 389 | if k <= d { 390 | Ok(self.clone()) 391 | } else if let Some(sub) = lamv.get(k - d - 1) { 392 | // Note that k - d above is valid (and > 0) because 0 ≤ d < k; 393 | // therefore, 0 ≤ k - depth - 1, so that is valid. 394 | // Also, the unwrap() below is granted because 0 < k. 395 | // FIXME: There is a better way of dealing with this. 396 | sub.lift(depth.unwrap()) 397 | } else { 398 | // k - lamv.len() is valid (and > 0) because if lamv.get(k - d - 1) = None, 399 | // lamv.len() ≤ k - d - 1 < k - d ≤ k (i.e. lamv.len() < k), so 400 | // 0 < k - lamv.len() (k - lamv.len() is positive). 401 | // Additionally, since we know 0 < lamv.len() < k, and k is a valid positive 402 | // i64, lamv.len() is also a valid positive i64. 403 | // So the cast is valid. 
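    // In other words, a Rel pointing past the substituted section is simply
    // shifted down by the number of terms in `lamv`.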
404 | Ok(Constr::Rel(k_ - lamv.len() as i64)) 405 | } 406 | }, 407 | _ => self.map_constr_with_binders( 408 | |&mut (ref mut depth, _)| { 409 | *depth = match *depth { 410 | None => Some(Idx::ONE), 411 | Some(depth) => Some(depth.checked_add(Idx::ONE)?), 412 | }; 413 | Ok(()) 414 | }, 415 | Self::substrec, 416 | &(depth, lamv) 417 | ) 418 | } 419 | } 420 | 421 | /// (subst1 M c) substitutes M for Rel(1) in c 422 | /// we generalise it to (substl [M1,...,Mn] c) which substitutes in parallel 423 | /// M1,...,Mn for respectively Rel(1),...,Rel(n) in c 424 | pub fn substn_many(&self, lamv: &[Substituend<&Constr>], n: Option) -> IdxResult { 425 | let lv = lamv.len(); 426 | if lv == 0 { return Ok(self.clone()) } 427 | else { self.substrec(&(n, lamv)) } 428 | } 429 | 430 | pub fn substnl(&self, laml: &[Constr], n: Option) -> IdxResult { 431 | let lamv: Vec<_> = laml.iter().map(Substituend::make).collect(); 432 | self.substn_many(&lamv, n) 433 | } 434 | 435 | pub fn substl(&self, laml: &[Constr]) -> IdxResult { 436 | self.substnl(laml, None) 437 | } 438 | 439 | pub fn subst1(&self, lam: &Constr) -> IdxResult { 440 | let lamv = [Substituend::make(lam)]; 441 | self.substn_many(&lamv, None) 442 | } 443 | 444 | /// Iterate lambda abstractions 445 | 446 | /* /// compose_lam [x1:T1;..;xn:Tn] b = [x1:T1]..[xn:Tn]b 447 | pub fn compose_lam(&self, l: I) 448 | where I: Iterator> { 449 | } */ 450 | /* val decompose_lam : constr -> (name * constr) list * constr */ 451 | /* 452 | let compose_lam l b = 453 | let rec lamrec = function 454 | | ([], b) -> b 455 | | ((v,t)::l, b) -> lamrec (l, Lambda (v,t,b)) 456 | in 457 | lamrec (l,b) */ 458 | 459 | /// Transforms a lambda term [x1:T1]..[xn:Tn]T into the pair 460 | /// ([(x1,T1);...;(xn,Tn)],T), where T is not a lambda 461 | /// 462 | /// (For technical reasons, we carry around the nested constructor as well, but we never need 463 | /// it). 464 | pub fn decompose_lam(&self) -> (Vec<&ORef<(Name, Constr, Constr)>>, &Constr) { 465 | let mut l = Vec::new(); 466 | let mut c = self; 467 | loop { 468 | match *c { 469 | Constr::Lambda(ref o) => { 470 | let (/*ref x, ref t*/_, _, ref c_) = **o; 471 | /* l.push((x, t)); */ 472 | l.push(o); 473 | c = c_; 474 | }, 475 | Constr::Cast(ref o) => { 476 | let (ref c_, _, _) = **o; 477 | c = c_; 478 | }, 479 | _ => { 480 | return (l, c) 481 | } 482 | } 483 | } 484 | } 485 | 486 | /// Alpha conversion functions 487 | 488 | /// alpha conversion : ignore print names and casts 489 | pub fn compare(&self, t2: &Self, f: F) -> bool 490 | where F: Fn(&Self, &Self) -> bool, 491 | { 492 | // FIXME: This is (in some cases) unnecessarily tail recursive. We could reduce the amount 493 | // of recursion required (and the likelihood that we'll get a stack overflow) by making the 494 | // function slightly less generic. 
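    // For example, `fun x : T => x` and `fun y : T => y` compare equal here,
    // since binder names are ignored and any `Cast` wrapper is looked through.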
495 | match (self, t2) { 496 | (&Constr::Rel(n1), &Constr::Rel(n2)) => n1 == n2, 497 | // | Meta m1, Meta m2 -> Int.equal m1 m2 498 | // | Var id1, Var id2 -> Id.equal id1 id2 499 | (&Constr::Sort(ref s1), &Constr::Sort(ref s2)) => s1.compare(s2), 500 | (&Constr::Cast(ref o1), _) => { 501 | let (ref c1, _, _) = **o1; 502 | f(c1, t2) 503 | }, 504 | (_, &Constr::Cast(ref o2)) => { 505 | let (ref c2, _, _) = **o2; 506 | f(self, c2) 507 | }, 508 | (&Constr::Prod(ref o1), &Constr::Prod(ref o2)) => { 509 | let (_, ref t1, ref c1) = **o1; 510 | let (_, ref t2, ref c2) = **o2; 511 | f(t1, t2) && f(c1, c2) 512 | }, 513 | (&Constr::Lambda(ref o1), &Constr::Lambda(ref o2)) => { 514 | let (_, ref t1, ref c1) = **o1; 515 | let (_, ref t2, ref c2) = **o2; 516 | f(t1, t2) && f(c1, c2) 517 | }, 518 | (&Constr::LetIn(ref o1), &Constr::LetIn(ref o2)) => { 519 | let (_, ref b1, ref t1, ref c1) = **o1; 520 | let (_, ref b2, ref t2, ref c2) = **o2; 521 | f(b1, b2) && f(t1, t2) && f(c1, c2) 522 | }, 523 | (&Constr::App(ref o1), &Constr::App(ref o2)) => { 524 | let (ref c1, ref l1) = **o1; 525 | let (ref c2, ref l2) = **o2; 526 | if l1.len() == l2.len() { 527 | f(c1, c2) && l1.iter().zip(l2.iter()).all( |(x, y)| f(x, y) ) 528 | } else { 529 | // It's really sad that we have to allocate to perform this equality check in 530 | // linear time... 531 | // (we actually shouldn't, since we should be able to modify the nodes in-place 532 | // in order to reuse the existing memory, but fixing this might be more trouble 533 | // than it's worth). 534 | // FIXME: Alternative: a reversed iterator may be doable quite efficiently 535 | // (without allocating), especially since we don't really need to go in forward 536 | // order to do equality checks... 537 | let (h1, l1) = c1.collapse_appl(&***l1); 538 | let (h2, l2) = c2.collapse_appl(&***l2); 539 | // We currently check in the opposite order from the OCaml, since we use the 540 | // reversed method. This shouldn't matter in terms of results, but it might 541 | // affect performance... we could also iterate in reverse. 
542 | if l1.len() == l2.len() { 543 | f(h1, h2) && l1.iter().zip(l2.iter()).all( |(x, y)| f(x, y) ) 544 | } else { false } 545 | } 546 | }, 547 | // | Evar (e1,l1), Evar (e2,l2) -> Int.equal e1 e2 && Array.equal f l1 l2 548 | (&Constr::Const(ref o1), &Constr::Const(ref o2)) => { 549 | let ref c1 = **o1; 550 | let ref c2 = **o2; 551 | c1.eq(c2, Cst::eq_con_chk) 552 | }, 553 | (&Constr::Ind(ref c1), &Constr::Ind(ref c2)) => c1.eq(c2, Ind::eq_ind_chk), 554 | (&Constr::Construct(ref o1), &Constr::Construct(ref o2)) => { 555 | let PUniverses(ref i1, ref u1) = **o1; 556 | let PUniverses(ref i2, ref u2) = **o2; 557 | i1.idx == i2.idx && i1.ind.eq_ind_chk(&i2.ind) && u1.equal(u2) 558 | }, 559 | (&Constr::Case(ref o1), &Constr::Case(ref o2)) => { 560 | let (_, ref p1, ref c1, ref bl1) = **o1; 561 | let (_, ref p2, ref c2, ref bl2) = **o2; 562 | f(p1, p2) && f(c1, c2) && 563 | bl1.len() == bl2.len() && bl1.iter().zip(bl2.iter()).all( |(x, y)| f(x, y)) 564 | }, 565 | (&Constr::Fix(ref o1), &Constr::Fix(ref o2)) => { 566 | let Fix(Fix2(ref ln1, i1), PRec(_, ref tl1, ref bl1)) = **o1; 567 | let Fix(Fix2(ref ln2, i2), PRec(_, ref tl2, ref bl2)) = **o2; 568 | i1 == i2 && 569 | ln1.len() == ln2.len() && ln1.iter().zip(ln2.iter()).all( |(x, y)| x == y) && 570 | tl1.len() == tl2.len() && tl1.iter().zip(tl2.iter()).all( |(x, y)| f(x, y) ) && 571 | bl1.len() == bl2.len() && bl1.iter().zip(bl2.iter()).all( |(x, y)| f(x, y) ) 572 | }, 573 | (&Constr::CoFix(ref o1), &Constr::CoFix(ref o2)) => { 574 | let CoFix(ln1, PRec(_, ref tl1, ref bl1)) = **o1; 575 | let CoFix(ln2, PRec(_, ref tl2, ref bl2)) = **o2; 576 | ln1 == ln2 && 577 | tl1.len() == tl2.len() && tl1.iter().zip(tl2.iter()).all( |(x, y)| f(x, y) ) && 578 | bl1.len() == bl2.len() && bl1.iter().zip(bl2.iter()).all( |(x, y)| f(x, y) ) 579 | }, 580 | (&Constr::Proj(ref o1), &Constr::Proj(ref o2)) => { 581 | let (ref p1, ref c1) = **o1; 582 | let (ref p2, ref c2) = **o2; 583 | p1.equal(p2) && f(c1, c2) 584 | }, 585 | (_, _) => false, 586 | } 587 | } 588 | 589 | pub fn eq(&self, n: &Self) -> bool { 590 | self as *const _ == n as *const _ || 591 | self.compare(n, Self::eq) 592 | } 593 | 594 | /// Universe substitutions 595 | pub fn subst_instance(&self, subst: &Instance, tbl: &Huniv) -> SubstResult> 596 | { 597 | if subst.is_empty() { Ok(Cow::Borrowed(self)) } 598 | else { 599 | // FIXME: We needlessly allocate in this function even if there were no changes, 600 | // due to the signature of map_constr_with_binders (and then waste time deallocating 601 | // all the stuff we just built after it's done). We could get away with not 602 | // performing any cloning etc. until we actually change something. 
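    // The `changed` cell below lets the outer call return Cow::Borrowed(self)
    // when no universe instance was actually rewritten, avoiding a needless copy.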
603 | fn aux(t: &Constr, env: &(&Instance, &Huniv, &Cell)) -> SubstResult { 604 | let (subst, tbl, ref changed) = *env; 605 | let f = |u| subst.subst_instance(u); 606 | match *t { 607 | Constr::Const(ref o) => { 608 | let PUniverses(ref c, ref u) = **o; 609 | return Ok( 610 | if u.is_empty() { t.clone() } 611 | else { 612 | let u_ = f(u)?; 613 | if &**u_ as *const _ == &***u as *const _ { t.clone() } 614 | else { 615 | changed.set(true); 616 | Constr::Const(ORef(Arc::from(PUniverses(c.clone(), 617 | u_)))) 618 | } 619 | } 620 | ) 621 | }, 622 | Constr::Ind(ref o) => { 623 | let PUniverses(ref i, ref u) = **o; 624 | return Ok( 625 | if u.is_empty() { t.clone() } 626 | else { 627 | let u_ = f(u)?; 628 | if &**u_ as *const _ == &***u as *const _ { t.clone() } 629 | else { 630 | changed.set(true); 631 | Constr::Ind(ORef(Arc::from(PUniverses(i.clone(), 632 | u_)))) 633 | } 634 | } 635 | ) 636 | }, 637 | Constr::Construct(ref o) => { 638 | let PUniverses(ref c, ref u) = **o; 639 | return Ok( 640 | if u.is_empty() { t.clone() } 641 | else { 642 | let u_ = f(u)?; 643 | if &**u_ as *const _ == &***u as *const _ { t.clone() } 644 | else { 645 | changed.set(true); 646 | Constr::Construct(ORef(Arc::from(PUniverses(c.clone(), 647 | u_)))) 648 | } 649 | } 650 | ) 651 | }, 652 | Constr::Sort(ref o) => { 653 | if let Sort::Type(ref u) = **o { 654 | return Ok({ 655 | let u_ = subst.subst_universe(u, tbl)?; 656 | if u_.hequal(u) { t.clone() } 657 | else { 658 | changed.set(true); 659 | Constr::Sort(ORef(Arc::new(u_.sort_of()))) 660 | } 661 | }) 662 | } 663 | }, 664 | _ => {} 665 | } 666 | t.map_constr_with_binders( |_| Ok(()), aux, env) 667 | } 668 | let changed = Cell::new(false); 669 | let c_ = aux(self, &(subst, tbl, &changed))?; 670 | Ok(if changed.get() { Cow::Owned(c_) } else { Cow::Borrowed(self) }) 671 | } 672 | } 673 | } 674 | 675 | impl Sort { 676 | fn compare(&self, s2: &Self) -> bool { 677 | match (self, s2) { 678 | (&Sort::Prop(c1), &Sort::Prop(c2)) => { 679 | match (c1, c2) { 680 | (SortContents::Pos, SortContents::Pos) | 681 | (SortContents::Null, SortContents::Null) => true, 682 | (SortContents::Pos, SortContents::Null) => false, 683 | (SortContents::Null, SortContents::Pos) => false, 684 | } 685 | }, 686 | (&Sort::Type(ref u1), &Sort::Type(ref u2)) => u1.equal(u2), 687 | (&Sort::Prop(_), &Sort::Type(_)) => false, 688 | (&Sort::Type(_), &Sort::Prop(_)) => false, 689 | } 690 | } 691 | } 692 | 693 | impl PUniverses { 694 | fn eq(&self, &PUniverses(ref c2, ref u2): &Self, f: F) -> bool 695 | where F: Fn(&T, &T) -> bool, 696 | { 697 | let PUniverses(ref c1, ref u1) = *self; 698 | u1.equal(u2) && f(c1, c2) 699 | } 700 | } 701 | -------------------------------------------------------------------------------- /src/coq/checker/univ.rs: -------------------------------------------------------------------------------- 1 | use coq::kernel::esubst::{ 2 | IdxError, 3 | IdxResult, 4 | }; 5 | use coq::lib::hashcons::{HashconsedType, Hlist, Hstring, Table}; 6 | use coq::lib::hashset::combine; 7 | use core::convert::TryFrom; 8 | use ocaml::de::{ 9 | ORef, 10 | }; 11 | use ocaml::values::{ 12 | Expr, 13 | HList, 14 | Instance, 15 | Int, 16 | Level, 17 | RawLevel, 18 | Univ, 19 | }; 20 | use std::cmp::{Ord, Ordering}; 21 | use std::collections::HashMap; 22 | use std::option::{NoneError}; 23 | use std::sync::{Arc}; 24 | 25 | /// Comparison on this type is pointer equality 26 | struct CanonicalArc { 27 | univ: Level, 28 | lt: Vec, 29 | le: Vec, 30 | rank: Int, 31 | predicative: bool, 32 | } 33 | 34 | 
#[derive(Clone,Copy,Debug,Eq,PartialEq)] 35 | enum FastOrder { 36 | Eq, 37 | Lt, 38 | Le, 39 | NLe, 40 | } 41 | 42 | /// A Level.t is either an alias for another one, or a canonical one, 43 | /// for which we know the universes that are above 44 | enum UnivEntry { 45 | Canonical(CanonicalArc), 46 | Equiv(Level), 47 | } 48 | 49 | pub struct Universes(UMap); 50 | 51 | #[derive(Clone, Debug, Eq, PartialEq)] 52 | pub enum UnivError { 53 | Anomaly(String), 54 | } 55 | 56 | #[derive(Clone, Copy, Debug, Eq, PartialEq)] 57 | pub enum SubstError { 58 | NotFound, 59 | Idx(IdxError), 60 | } 61 | 62 | type UMap = HashMap; 63 | 64 | type Hexpr = (); 65 | 66 | pub type Helem = Table)>, U>; 67 | 68 | pub type Huniv = Helem; 69 | 70 | pub type UnivResult = Result; 71 | 72 | pub type SubstResult = Result; 73 | 74 | pub trait Hashconsed { 75 | fn hash(&self) -> IdxResult; 76 | fn eq(&self, &Self) -> bool; 77 | fn hcons<'a>(self, &'a U) -> Self 78 | where Self: ToOwned; 79 | } 80 | 81 | impl ::std::convert::From for SubstError { 82 | fn from(e: IdxError) -> Self { 83 | SubstError::Idx(e) 84 | } 85 | } 86 | 87 | impl HashconsedType for ORef<(T, Int, HList)> 88 | where 89 | T: Hashconsed, 90 | T: Clone, 91 | { 92 | fn hash(&self) -> i64 { 93 | let (_, h, _) = **self; 94 | h 95 | } 96 | 97 | fn eq(&self, o2: &Self) -> bool { 98 | let (ref x1, _, ref l1) = **self; 99 | let (ref x2, _, ref l2) = **o2; 100 | x1.eq(x2) && l1.hequal(l2) 101 | } 102 | 103 | fn hashcons(self, u: &U) -> Self 104 | { 105 | // FIXME: Should these terms be new each time, or should we try to get more sharing? 106 | let (ref x, h, ref l) = *self; 107 | let x = x.to_owned().hcons(u); 108 | ORef(Arc::new((x, h, l.to_owned()))) 109 | } 110 | } 111 | 112 | /// Note: the OCaml is needlessly generic over T. At the end of the day, we only use HList with 113 | /// Univ. 114 | impl HList 115 | { 116 | fn hash(&self) -> i64 { 117 | match *self { 118 | HList::Nil => 0, 119 | HList::Cons(ref o) => { 120 | let (_, h, _) = **o; 121 | h 122 | } 123 | } 124 | } 125 | 126 | pub fn hequal(&self, l2: &Self) -> bool { 127 | // Works assuming all HLists are already hconsed. 128 | match (self, l2) { 129 | (&HList::Nil, &HList::Nil) => true, 130 | (&HList::Cons(ref o1), &HList::Cons(ref o2)) => &**o1 as *const _ == &**o2 as *const _, 131 | (_, _) => false, 132 | } 133 | } 134 | 135 | /// No recursive call: the interface guarantees that all HLists from this 136 | /// program are already hashconsed. If we get some external HList, we can 137 | /// still reconstruct it by traversing it entirely. 138 | fn hcons<'a, U>(self, u: &'a Helem) -> Self 139 | where 140 | T: Hashconsed, 141 | T: Clone, 142 | { 143 | match self { 144 | HList::Nil => HList::Nil, 145 | HList::Cons(o) => HList::Cons(u.hcons(o)), 146 | } 147 | } 148 | 149 | fn nil() -> Self { 150 | HList::Nil 151 | } 152 | 153 | fn cons<'a, U>(x: T, l: Self, u: &'a Helem) -> IdxResult 154 | where 155 | T: Hashconsed, 156 | T: Clone, 157 | { 158 | let h = x.hash()?; 159 | let hl = l.hash(); 160 | let h = combine::combine(h, hl); 161 | Ok(HList::Cons(ORef(Arc::new((x, h, l)))).hcons(u)) 162 | } 163 | 164 | pub fn map<'a, U, F, E>(&self, mut f: F, u: &'a Helem) -> Result 165 | where 166 | E: From, 167 | F: FnMut(&T) -> Result, 168 | T: Hashconsed, 169 | T: Clone, 170 | { 171 | match *self { 172 | HList::Nil => Ok(HList::nil()), 173 | HList::Cons(ref o) => { 174 | let (ref x, _, ref l) = **o; 175 | Ok(Self::cons(f(x)?, l.map(f, u)?, u)?) 
176 | } 177 | } 178 | } 179 | 180 | /// Apriori hashconsing ensures that the map is equal to its argument 181 | pub fn smart_map<'a, U, F, E>(&self, f: F, u: &'a Helem) -> Result 182 | where 183 | E: From, 184 | F: FnMut(&T) -> Result, 185 | T: Hashconsed, 186 | T: Clone, 187 | { 188 | self.map(f, u) 189 | } 190 | 191 | fn for_all2(&self, l2: &Self, f: F) -> bool 192 | where 193 | F: Fn(&T, &T) -> bool, 194 | { 195 | let mut l1 = self.iter(); 196 | let mut l2 = l2.iter(); 197 | loop { 198 | match (l1.next(), l2.next()) { 199 | (None, None) => return true, 200 | (Some(x1), Some(x2)) => { if !f(x1, x2) { return false } }, 201 | (_, _) => return false, 202 | } 203 | } 204 | } 205 | } 206 | 207 | 208 | impl RawLevel { 209 | fn equal(&self, y: &Self) -> bool { 210 | match (self, y) { 211 | (&RawLevel::Prop, &RawLevel::Prop) => true, 212 | (&RawLevel::Set, &RawLevel::Set) => true, 213 | (&RawLevel::Level(n, ref d), &RawLevel::Level(n_, ref d_)) => 214 | n == n_ && d.equal(d_), 215 | (&RawLevel::Var(n), &RawLevel::Var(n_)) => n == n_, 216 | (_, _) => false, 217 | } 218 | } 219 | 220 | fn compare(&self, v: &Self) -> Ordering { 221 | match (self, v) { 222 | (&RawLevel::Prop, &RawLevel::Prop) => Ordering::Equal, 223 | (&RawLevel::Prop, _) => Ordering::Less, 224 | (_, &RawLevel::Prop) => Ordering::Greater, 225 | (&RawLevel::Set, &RawLevel::Set) => Ordering::Equal, 226 | (&RawLevel::Set, _) => Ordering::Less, 227 | (_, &RawLevel::Set) => Ordering::Greater, 228 | (&RawLevel::Level(i1, ref dp1), &RawLevel::Level(i2, ref dp2)) => { 229 | match i1.cmp(&i2) { 230 | Ordering::Less => Ordering::Less, 231 | Ordering::Greater => Ordering::Greater, 232 | Ordering::Equal => dp1.compare(dp2), 233 | } 234 | }, 235 | (&RawLevel::Level(_, _), _) => Ordering::Less, 236 | (_, &RawLevel::Level(_, _)) => Ordering::Greater, 237 | (&RawLevel::Var(n), &RawLevel::Var(m)) => n.cmp(&m), 238 | } 239 | } 240 | 241 | fn hequal(&self, y: &Self) -> bool { 242 | match (self, y) { 243 | (&RawLevel::Prop, &RawLevel::Prop) => true, 244 | (&RawLevel::Set, &RawLevel::Set) => true, 245 | (&RawLevel::Level(n, ref d), &RawLevel::Level(n_, ref d_)) => 246 | n == n_ && HashconsedType:: _>>::eq(d, d_), 247 | (&RawLevel::Var(n), &RawLevel::Var(n_)) => n == n_, 248 | _ => false, 249 | } 250 | } 251 | 252 | /* fn hash(&self) -> i64 { 253 | match *self { 254 | RawLevel::Prop => combine::combinesmall(1, 0), 255 | RawLevel::Set => combine::combinesmall(1, 1), 256 | RawLevel::Var(n) => combine::combinesmall(2, n), 257 | RawLevel::Level(n, ref d) => 258 | combine::combinesmall(3, combine::combine(n, d.hash())) 259 | } 260 | } */ 261 | } 262 | 263 | /// Hashcons on levels + their hash 264 | impl Level { 265 | fn hequal(&self, y: &Self) -> bool { 266 | self.hash == y.hash && self.data.hequal(&y.data) 267 | } 268 | 269 | fn hash(&self) -> i64 { 270 | self.hash 271 | } 272 | 273 | fn data(&self) -> &RawLevel { 274 | &self.data 275 | } 276 | 277 | pub fn equal(&self, y: &Self) -> bool { 278 | self.hash == y.hash && 279 | self.data.equal(&y.data) 280 | } 281 | } 282 | 283 | /// For use in UMap. 284 | /// TODO: Consider replacing this with a LevelKey wrapper, once it's been ascertained that this 285 | /// won't cause problems. 286 | impl PartialEq for Level { 287 | fn eq(&self, v: &Self) -> bool { 288 | // Comparison equals 0 for RawLevels and Levels is the same as equality. 289 | self.equal(v) 290 | } 291 | } 292 | 293 | /// For use in UMap. 
294 | /// TODO: Consider replacing this with a LevelKey wrapper, once it's been ascertained that this 295 | /// won't cause problems. 296 | impl Eq for Level {} 297 | 298 | /// For use in UMap. 299 | /// TODO: Consider replacing this with a LevelKey wrapper, once it's been ascertained that this 300 | /// won't cause problems. 301 | impl ::std::hash::Hash for Level { 302 | fn hash(&self, state: &mut H) 303 | where 304 | H: ::std::hash::Hasher, 305 | { 306 | // Just write the hash directly to the state... note that if this isn't a dummy hasher, 307 | // this will try to scramble the hash, which is possibly not a good thing for collisions. 308 | state.write_i64(self.hash()); 309 | } 310 | } 311 | 312 | impl Expr { 313 | fn hequal(&self, l2: &Self) -> bool { 314 | match (self, l2) { 315 | (&Expr(ref b, n), &Expr(ref b_, n_)) => 316 | b.hequal(b_) && n == n_, 317 | } 318 | } 319 | 320 | fn hash(&self) -> IdxResult { 321 | let Expr(ref x, n) = *self; 322 | n.checked_add(x.hash()).ok_or(IdxError::from(NoneError)) 323 | } 324 | } 325 | 326 | impl Hashconsed<()> for Expr { 327 | /// NOTE: Right now we assume Dps are all already hash consed, so we don't need HDp to 328 | /// implement this. 329 | fn hcons(self, _: &Hexpr) -> Self { 330 | self 331 | } 332 | 333 | fn hash(&self) -> IdxResult { 334 | Expr::hash(self) 335 | } 336 | 337 | /// Interestingly, this just uses normal equality, which suggests that we really *aren't* 338 | /// relying on the hash consing in any fundamental way... 339 | fn eq(&self, y: &Self) -> bool { 340 | self.equal(y) 341 | } 342 | } 343 | 344 | impl CanonicalArc { 345 | /// [compare_neq] : is [arcv] in the transitive upward closure of [arcu] ? 346 | 347 | /// In [strict] mode, we fully distinguish between LE and LT, while in 348 | /// non-strict mode, we simply answer LE for both situations. 349 | /// 350 | /// If [arcv] is encountered in a LT part, we could directly answer 351 | /// without visiting unneeded parts of this transitive closure. 352 | /// In [strict] mode, if [arcv] is encountered in a LE part, we could only 353 | /// change the default answer (1st arg [c]) from NLE to LE, since a strict 354 | /// constraint may appear later. During the recursive traversal, 355 | /// [lt_done] and [le_done] are universes we have already visited, 356 | /// they do not contain [arcv]. The 4rd arg is [(lt_todo,le_todo)], 357 | /// two lists of universes not yet considered, known to be above [arcu], 358 | /// strictly or not. 359 | /// 360 | /// We use depth-first search, but the presence of [arcv] in [new_lt] 361 | /// is checked as soon as possible : this seems to be slightly faster 362 | /// on a test. 363 | /// 364 | /// The universe should actually be in the universe map, or else it will return an error. 
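    ///
    /// A hypothetical illustration of the possible answers: with arcs
    /// `u --lt--> v --le--> w` in [g], comparing `u` against `w` returns `Lt`
    /// when [strict] is true (an LT edge was crossed on the way to `w`) and
    /// `Le` when it is false, while comparing `u` against an arc that is not
    /// reachable at all returns `NLe`. `Eq` is only produced by the
    /// (currently commented-out) `fast_compare` wrapper for pointer-equal arcs.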
365 | fn fast_compare_neq(&self, arcv: &Self, strict: bool, g: &Universes) -> UnivResult { 366 | // [c] characterizes whether arcv has already been related 367 | // to arcu among the lt_done,le_done universe 368 | let mut c = FastOrder::NLe; 369 | let mut lt_done = Vec::new(); 370 | let mut le_done = Vec::new(); 371 | let mut lt_todo : Vec<&CanonicalArc> = Vec::new(); 372 | let mut le_todo = vec![self]; 373 | loop { 374 | if let Some(arc) = lt_todo.pop() { 375 | if !lt_done.iter().any( |&arc_| arc as *const _ == arc_ as *const _) { 376 | for u in arc.le.iter() { 377 | let arc = u.repr(g)?; 378 | if arc as *const _ == arcv as *const _ { 379 | return Ok(if strict { FastOrder::Lt } else { FastOrder::Le }) 380 | } else { 381 | lt_todo.push(arc); 382 | } 383 | } 384 | for u in arc.lt.iter() { 385 | let arc = u.repr(g)?; 386 | if arc as *const _ == arcv as *const _ { 387 | return Ok(if strict { FastOrder::Lt } else { FastOrder::Le }) 388 | } else { 389 | lt_todo.push(arc); 390 | } 391 | } 392 | lt_done.push(arc); 393 | } 394 | } else if let Some(arc) = le_todo.pop() { 395 | // lt_todo = [] 396 | if arc as *const _ == arcv as *const _ { 397 | // No need to continue inspecting universes above arc; 398 | // if arcv is strictly above arc, then we would have a cycle. 399 | // But we cannot answer LE yet, a stronger constraint may 400 | // come later from [le_todo]. 401 | if strict { 402 | c = FastOrder::Le; 403 | } else { 404 | return Ok(FastOrder::Le); 405 | } 406 | } else { 407 | if !(lt_done.iter().any( |&arc_| arc as *const _ == arc_ as *const _) || 408 | le_done.iter().any( |&arc_| arc as *const _ == arc_ as *const _)) { 409 | for u in arc.lt.iter() { 410 | let arc = u.repr(g)?; 411 | if arc as *const _ == arcv as *const _ { 412 | return Ok(if strict { FastOrder::Lt } else { FastOrder::Le }) 413 | } else { 414 | lt_todo.push(arc); 415 | } 416 | } 417 | // Cannot use .extend here because we need to fail fast on failure. There 418 | // is probably a better way to deal with this. 419 | for u in arc.le.iter() { 420 | le_todo.push(u.repr(g)?); 421 | } 422 | le_done.push(arc); 423 | } 424 | } 425 | } else { 426 | // lt_todo = [], le_todo = [] 427 | return Ok(c) 428 | } 429 | } 430 | } 431 | 432 | // /// The universe should actually be in the universe map, or else it will return an error. 433 | // fn fast_compare(&self, arcv: &Self, g: &Universes) -> UnivResult { 434 | // if self as *const _ == arcv as *const _ { Ok(FastOrder::Eq) } 435 | // else { self.fast_compare_neq(arcv, true, g) } 436 | // } 437 | 438 | /// The universe should actually be in the universe map, or else it will return an error. 439 | fn is_leq(&self, arcv: &Self, g: &Universes) -> UnivResult { 440 | Ok(self as *const _ == arcv as *const _ || 441 | match self.fast_compare_neq(arcv, false, g)? { 442 | FastOrder::NLe => false, 443 | FastOrder::Eq | FastOrder::Le | FastOrder::Lt => true, 444 | }) 445 | } 446 | 447 | /// The universe should actually be in the universe map, or else it will return an error. 
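    ///
    /// Put differently (a paraphrase of the match below): `is_lt` holds exactly
    /// when the strict comparison answers `Lt`; `Eq`, `Le` and `NLe` all map to
    /// false.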
448 | fn is_lt(&self, arcv: &Self, g: &Universes) -> UnivResult { 449 | if self as *const _ == arcv as *const _ { 450 | Ok(false) 451 | } else { 452 | self.fast_compare_neq(arcv, true, g).map( |c| match c { 453 | FastOrder::Lt => true, 454 | FastOrder::Eq | FastOrder::Le | FastOrder::NLe => false, 455 | }) 456 | } 457 | } 458 | 459 | fn is_prop(&self) -> bool { 460 | self.univ.is_prop() 461 | } 462 | 463 | fn is_set(&self) -> bool { 464 | self.univ.is_set() 465 | } 466 | } 467 | 468 | impl Level { 469 | /// Worked out elsewhere; if this is wrong, we can figure out another way to get this value. 470 | const PROP : Self = Level { hash: 7, data: RawLevel::Prop }; 471 | const SET : Self = Level { hash: 8, data: RawLevel::Set }; 472 | 473 | fn is_prop(&self) -> bool { 474 | match self.data { 475 | RawLevel::Prop => true, 476 | _ => false, 477 | } 478 | } 479 | 480 | fn is_set(&self) -> bool { 481 | match self.data { 482 | RawLevel::Set => true, 483 | _ => false, 484 | } 485 | } 486 | 487 | fn compare(&self, v: &Self) -> Ordering { 488 | if self.hequal(v) { Ordering::Equal } 489 | else { 490 | match self.hash().cmp(&v.hash()) { 491 | Ordering::Equal => self.data().compare(v.data()), 492 | // FIXME: Is hash ordering really reliable? 493 | o => o, 494 | } 495 | } 496 | } 497 | 498 | /// Every Level.t has a unique canonical arc representative 499 | 500 | /// repr : universes -> Level.t -> canonical_arc 501 | /// canonical representative : we follow the Equiv links 502 | /// The universe should actually be in the universe map, or else it will return an error. 503 | /// Also note: if the map universe map contains Equiv cycles, this will loop forever! 504 | fn repr<'a>(&'a self, g: &'a Universes) -> UnivResult<&CanonicalArc> { 505 | let mut u = self; 506 | loop { 507 | match g.0.get(u) { 508 | Some(&UnivEntry::Equiv(ref v)) => { u = v }, 509 | Some(&UnivEntry::Canonical(ref arc)) => return Ok(arc), 510 | None => 511 | return Err(UnivError::Anomaly(format!("Univ.repr: Universe {:?} undefined", 512 | u))), 513 | } 514 | } 515 | } 516 | 517 | /// Invariants : compare(u,v) = EQ <=> compare(v,u) = EQ 518 | /// compare(u,v) = LT or LE => compare(v,u) = NLE 519 | /// compare(u,v) = NLE => compare(v,u) = NLE or LE or LT 520 | /// 521 | /// Adding u>=v is consistent iff compare(v,u) # LT 522 | /// and then it is redundant iff compare(u,v) # NLE 523 | /// Adding u>v is consistent iff compare(v,u) = NLE 524 | /// and then it is redundant iff compare(u,v) = LT 525 | 526 | /// Universe checks [check_eq] and [check_leq], used in coqchk 527 | 528 | /// First, checks on universe levels 529 | 530 | /// The universe should actually be in the universe map, or else it will return an error. 531 | fn check_equal(&self, v: &Level, g: &Universes) -> UnivResult { 532 | let arcu = self.repr(g)?; 533 | let arcv = v.repr(g)?; 534 | Ok(arcu as *const _ == arcv as *const _) 535 | } 536 | 537 | /// The universe should actually be in the universe map, or else it will return an error. 538 | fn check_eq(&self, v: &Level, g: &Universes) -> UnivResult { 539 | Ok(self.check_equal(v, g)?) 540 | } 541 | 542 | /// The universe should actually be in the universe map, or else it will return an error. 
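    ///
    /// Hypothetical examples of the non-strict shortcuts taken below:
    /// `Level::PROP.check_smaller(&v, false, g)` holds for any `v` present in
    /// `g`, and `Level::SET.check_smaller(&v, false, g)` holds whenever `v`'s
    /// canonical arc is predicative; only the remaining cases query the graph
    /// through `is_leq` (or `is_lt` when [strict] is true).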
543 | fn check_smaller(&self, v: &Self, strict: bool, g: &Universes) -> UnivResult { 544 | let arcu = self.repr(g)?; 545 | let arcv = v.repr(g)?; 546 | if strict { 547 | arcu.is_lt(arcv, g) 548 | } else { 549 | Ok(arcu.is_prop() 550 | || (arcu.is_set() && arcv.predicative) 551 | || (arcu.is_leq(arcv, g)?)) 552 | } 553 | } 554 | } 555 | 556 | impl Expr { 557 | /// Worked out elsewhere; if this is wrong, we can figure out another way to get this value. 558 | const PROP : Self = Expr(Level::PROP, 0); 559 | 560 | const SET : Self = Expr(Level::SET, 0); 561 | 562 | const TYPE1 : Self = Expr(Level::SET, 1); 563 | 564 | fn is_prop(&self) -> bool { 565 | if let Expr(ref l, 0) = *self { l.is_prop() } 566 | else { false } 567 | } 568 | 569 | fn equal(&self, y: &Self) -> bool { 570 | let Expr(ref u, n) = *self; 571 | let Expr(ref v, n_) = *y; 572 | n == n_ && u.equal(v) 573 | } 574 | 575 | fn successor(&self) -> IdxResult { 576 | let Expr(ref u, n) = *self; 577 | if u.is_prop() { Ok(Self::TYPE1) } 578 | // NOTE: Assuming Dps are all maximally hconsed already when loaded from the file, we just 579 | // need to clone() here to retain maximal sharing. 580 | else { Ok(Expr(u.clone(), n.checked_add(1).ok_or(IdxError::from(NoneError))?)) } 581 | } 582 | 583 | fn super_(&self, y: &Self) -> Result { 584 | let Expr(ref u, n) = *self; 585 | let Expr(ref v, n_) = *self; 586 | match u.compare(v) { 587 | Ordering::Equal => if n < n_ { Ok(true) } else { Ok(false) }, 588 | _ if self.is_prop() => Ok(true), 589 | _ if y.is_prop() => Ok(false), 590 | o => Err(o) 591 | } 592 | } 593 | 594 | fn map(&self, f: F, u: &Hexpr) -> Result 595 | where 596 | F: Fn(&Level) -> Result, 597 | { 598 | let Expr(ref v, n) = *self; 599 | let v_ = f(v)?; 600 | Ok(if v_.is_prop() && n != 0 { 601 | Expr(Level::SET, n).hcons(u) 602 | } else { 603 | Expr(v_, n).hcons(u) 604 | }) 605 | } 606 | 607 | /// The universe should actually be in the universe map, or else it will return an error. 608 | fn check_equal(&self, y: &Self, g: &Universes) -> UnivResult { 609 | Ok(self.hequal(y) || { 610 | let Expr(ref u, n) = *self; 611 | let Expr(ref v, m) = *y; 612 | n == m && u.check_equal(v, g)? 613 | }) 614 | } 615 | 616 | /// The universe should actually be in the universe map, or else it will return an error. 617 | fn check_smaller(&self, &Expr(ref v, m): &Self, g: &Universes) -> UnivResult { 618 | let Expr(ref u, n) = *self; 619 | if n <= m { 620 | u.check_smaller(v, false, g) 621 | } else if n - m == 1 { 622 | // n - m is valid, because n > m, so 0 < n - m ≤ n ≤ i64::MAX. 623 | u.check_smaller(v, true, g) 624 | } else { 625 | Ok(false) 626 | } 627 | } 628 | 629 | /// The universe should actually be in the universe map, or else it will return an error. 630 | fn exists_bigger(&self, l: &Univ, g: &Universes) -> UnivResult { 631 | // NOTE: We don't just use any / all because we want to propagate errors; there may be a 632 | // way to do both somehow. 633 | for ul_ in l.iter() { 634 | if self.check_smaller(ul_, g)? { return Ok(true) } 635 | } 636 | return Ok(false) 637 | } 638 | } 639 | 640 | impl Univ { 641 | pub fn equal(&self, y: &Self) -> bool { 642 | self.hequal(y) || 643 | self.hash() == y.hash() && 644 | self.for_all2(y, Expr::equal) 645 | } 646 | 647 | pub fn is_type0m(&self) -> bool { 648 | // I believe type0m is: 649 | // Cons (({ hash = 7; data = Prop }, 0), 459193, Nil) 650 | // Details worked out elsewhere; if they're wrong, we can fgure out something else. 
651 | match *self { 652 | HList::Nil => false, 653 | HList::Cons(ref o) => { 654 | let (ref x, h, ref l) = **o; 655 | h == 459193 && 656 | x.equal(&Expr::PROP) && 657 | if let HList::Nil = *l { true } else { false } 658 | } 659 | } 660 | } 661 | 662 | pub fn is_type0(&self) -> bool { 663 | // I believe type0 is: 664 | // Cons (({ hash = 8; data = Set }, 0), 524792, Nil) 665 | // Details worked out elsewhere; if they're wrong, we can fgure out something else. 666 | match *self { 667 | HList::Nil => false, 668 | HList::Cons(ref o) => { 669 | let (ref x, h, ref l) = **o; 670 | h == 524792 && 671 | x.equal(&Expr::SET) && 672 | if let HList::Nil = *l { true } else { false } 673 | } 674 | } 675 | } 676 | 677 | /// Returns the formal universe that lies just above the universe variable u. 678 | /// Used to type the sort u. 679 | pub fn super_(&self, u: &Huniv) -> IdxResult { 680 | self.map( |x| x.successor(), u ) 681 | } 682 | 683 | fn sort(&self, tbl: &Huniv) -> IdxResult { 684 | fn aux(a: &Expr, mut l: Univ, tbl: &Huniv) -> IdxResult { 685 | while let HList::Cons(o) = l { 686 | match a.super_(&(*o).0) { 687 | Ok(false) => { l = (*o).2.clone(); }, 688 | Ok(true) => return Ok(HList::Cons(o)), 689 | Err(Ordering::Less) => return Univ::cons(a.clone(), HList::Cons(o), tbl), 690 | Err(_) => 691 | return Univ::cons((&(*o).0).clone(), aux(a, (&(*o).2).clone(), tbl)?, tbl), 692 | } 693 | } 694 | Univ::cons(a.clone(), l, tbl) 695 | } 696 | self.iter().fold(Ok(HList::nil()), |acc, a| aux(a, acc?, tbl)) 697 | } 698 | 699 | /// Then, checks on universes 700 | /// The universe should actually be in the universe map, or else it will return an error. 701 | fn check_eq_univs(&self, l2: &Self, g: &Universes) -> UnivResult { 702 | // NOTE: We don't just use any / all because we want to propagate errors; there may be a 703 | // way to do both somehow. 704 | let exists = |x1: &Expr, l: &Univ| { 705 | for x2 in l.iter() { 706 | if x1.check_equal(x2, g)? { return Ok(true) } 707 | } 708 | Ok(false) 709 | }; 710 | for x1 in self.iter() { 711 | if !exists(x1, l2)? { return Ok(false) } 712 | } 713 | for x2 in l2.iter() { 714 | if !exists(x2, self)? { return Ok(false) } 715 | } 716 | return Ok(true) 717 | } 718 | 719 | /// The universe should actually be in the universe map, or else it will return an error. 720 | pub fn check_eq(&self, v: &Self, g: &Universes) -> UnivResult { 721 | Ok(self.hequal(v) || 722 | self.check_eq_univs(v, g)?) 723 | } 724 | 725 | /// The universe should actually be in the universe map, or else it will return an error. 726 | fn real_check_leq(&self, v: &Self, g: &Universes) -> UnivResult { 727 | // NOTE: We don't just use any / all because we want to propagate errors; there may be a 728 | // way to do both somehow. 729 | for ul in self.iter() { 730 | if !ul.exists_bigger(v, g)? { return Ok(false) } 731 | } 732 | return Ok(true) 733 | } 734 | 735 | /// The universe should actually be in the universe map, or else it will return an error. 736 | pub fn check_leq(&self, v: &Self, g: &Universes) -> UnivResult { 737 | Ok(self.hequal(v) || 738 | self.is_type0m() || 739 | self.check_eq_univs(v, g)? || 740 | self.real_check_leq(v, g)?) 
741 | } 742 | } 743 | 744 | impl Instance { 745 | pub fn equal(&self, u: &Self) -> bool { 746 | &***self as *const _ == &***u as *const _ || 747 | (self.is_empty() && u.is_empty()) || 748 | (/* Necessary as universe instances might come from different modules and 749 | unmarshalling doesn't preserve sharing */ 750 | self.len() == u.len() && self.iter().zip(u.iter()).all( |(x, y)| x.equal(y))) 751 | } 752 | 753 | /// The universe should actually be in the universe map, or else it will return an error. 754 | pub fn check_eq(&self, t2: &Instance, g: &Universes) -> UnivResult { 755 | if &***self as *const _ == &***t2 as *const _ { return Ok(true) } 756 | if self.len() != t2.len() { return Ok(false) } 757 | // NOTE: We don't just use any / all because we want to propagate errors; there may be a 758 | // way to do both somehow. 759 | for (u, v) in self.iter().zip(t2.iter()) { 760 | if !u.check_eq(v, g)? { 761 | return Ok(false) 762 | } 763 | } 764 | return Ok(true) 765 | } 766 | 767 | /// Substitution functions 768 | 769 | /// Substitute instance inst for ctx in csts 770 | fn subst_instance_level(&self, l: &Level) -> SubstResult { 771 | Ok(match l.data { 772 | RawLevel::Var(n) => { 773 | // TODO: Check whether this get is handled at typechecking time? 774 | let n = usize::try_from(n).map_err(IdxError::from)?; 775 | // TODO: Check whether this is checked at typechecking time? 776 | self.get(n).ok_or(SubstError::NotFound)? 777 | }, 778 | _ => l, 779 | }.clone()) 780 | } 781 | 782 | pub fn subst_instance(&self, i: &Instance) -> SubstResult { 783 | i.smart_map( |l| self.subst_instance_level(l), Level::hequal) 784 | } 785 | 786 | pub fn subst_universe(&self, u: &Univ, tbl: &Huniv) -> SubstResult { 787 | let u_ = u.smart_map( |x| x.map( |u| self.subst_instance_level(u), &()), tbl)?; 788 | if u.hequal(&u_) { Ok(u_) } 789 | else { Ok(u_.sort(tbl)?) } 790 | } 791 | } 792 | -------------------------------------------------------------------------------- /src/coq/kernel/esubst.rs: -------------------------------------------------------------------------------- 1 | use std::borrow::{Borrow}; 2 | use std::convert::{TryFrom}; 3 | use std::num::{TryFromIntError}; 4 | use std::option::{NoneError}; 5 | // use smallvec::{SmallVec}; 6 | use core::nonzero::{NonZero, Zeroable}; 7 | 8 | /* 9 | 10 | For my own clarification, an intro to De Bruijn indices and explicit substitutions: 11 | 12 | - a ⋅ s ≡ what you get when you try to substitute a for the first De Bruijn index 13 | in s. 14 | 15 | id ≡ identity substitution: De Bruijn indices go to themselves. 16 | 17 | (λa)b →β a[b ⋅ id] 18 | 19 | 1[c ⋅ s] → c (1 is the De Bruijn index 1) 20 | 21 | (cd)[s] → (c[s])(d[s]) 22 | 23 | */ 24 | 25 | // An Idx is guaranteed to have positive (nonzero) i32. 26 | #[derive(Clone,Debug,Copy,Eq,Hash,PartialEq)] 27 | pub struct Idx(i32); 28 | 29 | #[derive(Clone, Copy, Debug, Eq, PartialEq)] 30 | pub struct IdxError(()); 31 | 32 | pub type IdxResult = Result; 33 | 34 | impl ::std::convert::From for i32 { 35 | fn from(idx: Idx) -> i32 { 36 | idx.0 37 | } 38 | } 39 | 40 | impl ::std::convert::From for u32 { 41 | fn from(idx: Idx) -> u32 { 42 | // Valid because idx is a positive i32, which is always in range for u32. 
43 | idx.0 as u32 44 | } 45 | } 46 | 47 | impl ::std::convert::From for IdxError { 48 | fn from(_: NoneError) -> Self { 49 | IdxError(()) 50 | } 51 | } 52 | 53 | impl ::std::convert::From for IdxError { 54 | fn from(_: TryFromIntError) -> Self { 55 | IdxError(()) 56 | } 57 | } 58 | 59 | impl Idx { 60 | pub fn new(x: NonZero) -> Result>::Error*/IdxError> where i32: TryFrom, T: Zeroable { 61 | match i32::try_from(x.get()) { 62 | // The 0 case should not happen, but since try_from is a safe trait it's hard to enforce this 63 | // (someone could provide a stupid implementation that didn't map 0 to 0). Anyway, the 64 | // purpose of the NonZero in the argument is to have people check for zero ahead of 65 | // time. 66 | Ok(i) if i > 0 => Ok(Idx(i)), 67 | _ => Err(IdxError(())), 68 | } 69 | } 70 | 71 | pub const ONE : Self = Idx(1); 72 | 73 | pub fn checked_add(self, o: Idx) -> IdxResult { 74 | // Must be positive since both Idxs are positive. 75 | Ok(Idx(self.0.checked_add(o.0)?)) 76 | } 77 | 78 | pub fn checked_sub(self, o: Idx) -> IdxResult> { 79 | // Both self.0 and o.0 are positive i32s, so subtracting one from the other definitely does 80 | // not overflow. 81 | match self.0 - o.0 { 82 | i if i > 0 => Ok(Some(Idx(i))), 83 | 0 => Ok(None), 84 | _ => Err(IdxError(())), 85 | } 86 | } 87 | } 88 | 89 | /// We default to SmallVec of size 1 for now; not sure how big these usually get. 90 | /// Can easily change it to some other value if necessary; the point is to try to reduce the 91 | /// load on jemalloc (since we allocate almost nothing but Subs and ESubs vecs during reduction, 92 | /// outside of bump allocation in the term arena). 93 | /// 94 | /// Note that currently, there's a tradeoff between how big we make this and how large our FTerms 95 | /// are, but that tradeoff isn't very fundamental since we could always allocate the SVec in an 96 | /// arena as well. 97 | /// 98 | /// UPDATED: For this to work, because of the way we're using arenas, we'd need SmallVec to 99 | /// implement #[may_dangle] on T. This would probably require a somewhat substantial rewrite of 100 | /// SmallVec, though, in order to be safe (for instance, proper use of PhantomData). So it's 101 | /// deferred for the moment. May be addressed later. 102 | // type SVec = SVec<[I; 1]>; 103 | type SVec = Vec; 104 | 105 | /// Explicit substitutions of type [T], where I : Borrow<[T]>. 106 | /// 107 | /// NOTE: This differs from the OCaml's implementation because instead of representing 108 | /// this as a linked list of subs, we represent a set of substitutions as a vector that 109 | /// terminates in a bounded identity. This reduces sharing but significantly decreases 110 | /// allocations and indirection, and makes in-place mutation easier. We will see 111 | /// whether this turns out to be worthwhile. 112 | #[derive(Clone,Debug)] 113 | pub struct Subs { 114 | /// Substitution operations (applied in order from right to left). 115 | ops: SVec>, 116 | /// ESID(n) = %n END bounded identity 117 | /// Note that this can be 0, not not negative. 
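    /// (A hypothetical example of the encoding: `Subs::id(Some(Idx::ONE))` yields
    /// `ops = []` with `id = 1`, i.e. the bounded identity %1 END, while
    /// `Subs::id(None)` yields the plain identity with `id = 0`.)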
118 | id: i32, 119 | } 120 | 121 | #[derive(Clone,Debug)] 122 | enum Op { 123 | /// CONS([|t1..tn|],S) = (S.t1...tn) parallel substitution 124 | /// (beware of the order: indice 1 is substituted by tn) 125 | Cons(I), 126 | /// SHIFT(n,S) = (^n o S) terms in S are relocated with n vars 127 | Shift(Idx), 128 | /// LIFT(n,S) = (%n S) stands for ((^n o S).n...1) 129 | /// (corresponds to S crossing n binders) 130 | Lift(Idx), 131 | } 132 | 133 | #[derive(Copy,Clone,Debug)] 134 | pub enum Expr { 135 | /// Variable is substituted by value (i.e. the value must be shifted 136 | /// by some number of binders). 137 | Val(T), 138 | /// Variable relocated at index; `None` if the variable points inside 139 | /// the substitution and `Some(k)` if it points k bindings beyond 140 | /// the substitution. 141 | Var(Option), 142 | } 143 | 144 | // TODO: investigate whether when using vectors in Rust, the extra checks 145 | // here to avoid pushing onto it are actually worthwhile (since we won't 146 | // trigger GC and don't allocate too frequently for vectors, and 147 | // don't do much pointer chasing either). 148 | impl Subs { 149 | pub fn dup(&self, f: F) -> Self 150 | where F: Fn(&I) -> I 151 | { 152 | Subs { 153 | ops: self.ops.iter().map( |op| { 154 | match *op { 155 | Op::Cons(ref i) => Op::Cons(f(i)), 156 | Op::Shift(i) => Op::Shift(i), 157 | Op::Lift(i) => Op::Lift(i), 158 | } }).collect(), 159 | id: self.id, 160 | } 161 | } 162 | 163 | pub fn id(idx: Option) -> Self { 164 | Subs { ops: SVec::new(), id: match idx { Some(Idx(i)) => i, None => 0 } } 165 | } 166 | 167 | fn push(&mut self, o: Op) -> IdxResult<()> { 168 | // TODO: Verify that u32 to usize is a safe upcast. 169 | if self.ops.len() == u32::max_value() as usize { 170 | // ops can never be larger than u32::MAX. 171 | return Err(IdxError(())); 172 | } 173 | self.ops.push(o); 174 | Ok(()) 175 | } 176 | 177 | pub fn cons(&mut self, x: I) -> IdxResult<()> 178 | where I: Borrow<[T]> { 179 | // Don't bother cons-ing an empty substitution list. 180 | if x.borrow().len() > 0 { 181 | return self.push(Op::Cons(x)); 182 | } 183 | Ok(()) 184 | } 185 | 186 | pub fn shift(&mut self, n: Idx) -> IdxResult<()> { 187 | if let Some(&mut Op::Shift(ref mut k)) = self.ops.last_mut() { 188 | // Coalesce shifts with shifts. 189 | *k = Idx(k.0.checked_add(n.0)?); 190 | return Ok(()) 191 | } 192 | return self.push(Op::Shift(n)) 193 | } 194 | 195 | pub fn liftn(&mut self, n: Idx) -> IdxResult<()> { 196 | match self.ops.last_mut() { 197 | None => { 198 | // Coalesce ids with lifts 199 | // This will now definitely be positive. 200 | self.id = self.id.checked_add(n.0)?; 201 | return Ok(()) 202 | }, 203 | Some(&mut Op::Lift(ref mut p)) => { 204 | // Coalesce lifts with lifts 205 | *p = Idx(p.0.checked_add(n.0)?); 206 | return Ok(()) 207 | }, 208 | _ => {}, 209 | } 210 | return self.push(Op::Lift(n)) 211 | } 212 | 213 | pub fn lift(&mut self) -> IdxResult<()> { 214 | // TODO: See if it's worthwhile factoring out the n > 0 check as is done in the OCaml 215 | // implementation. 216 | // The OCaml implementation presumably does it to avoid the branch, but is it 217 | // really a bottleneck? 218 | self.liftn(Idx(1)) 219 | } 220 | 221 | /// [shift_cons(k,s,[|t1..tn|])] builds (^k s).t1..tn 222 | pub fn shift_cons(&mut self, k: Idx, t: I) -> IdxResult<()> 223 | where I: Borrow<[T]> 224 | { 225 | /* // Don't bother shifting by 0 226 | if k > 0 { 227 | if let Some(&mut Op::Shift(ref mut n)) = &mut *self.ops.last_mut() { 228 | // Coalesce shifts with shifts. 
229 | *n += k; 230 | } else { 231 | self.ops.push(Op::Shift(k + n)); 232 | } 233 | } */ 234 | // TODO: Figure out why the above is inlined in OCaml? Rust will probably inline it anyway. 235 | self.shift(k)?; 236 | // TODO: Figure out why the below must be inlined? Because it saves a branch, I guess? 237 | // But is this even a branch we can reliably save, if we're trying to avoid allocation? 238 | // self.push(Op::Cons(t)) 239 | self.cons(t) 240 | } 241 | 242 | /// [expand_rel k subs] expands de Bruijn [k] in the explicit substitution 243 | /// [subs]. The result is either (Inl(lams,v)) when the variable is 244 | /// substituted by value [v] under lams binders (i.e. v *has* to be 245 | /// shifted by lams), or (Inr (k',p)) when the variable k is just relocated 246 | /// as k'; p is None if the variable points inside subs and Some(k) if the 247 | /// variable points k bindings beyond subs (cf argument of ESID). 248 | pub fn expand_rel(&self, k: Idx) -> IdxResult<(Idx, Expr<&T>)> 249 | where I: Borrow<[T]> 250 | { 251 | let mut lams = 0i64; 252 | let mut k = k.0; 253 | // INVARIANT: 0 < k ≤ i32::MAX. 254 | // INVARIANT: after x iterations through the loop, lams ≤ x * i32::MAX, and 0 < k ≤ i32::MAX 255 | for op in self.ops.iter().rev() { 256 | match *op { 257 | Op::Cons(ref def) => { 258 | let def = def.borrow(); 259 | let len = def.len(); 260 | // TODO: Verify that i32 to usize is a safe upcast. 261 | match len.checked_sub(k as usize) { 262 | Some(i) => { 263 | // 0 ≤ len - k, and 1 ≤ k (so -k ≤ -1 and len-k ≤ len-1) 264 | // 0 ≤ len - k ≤ len - 1 < len 265 | // 0 ≤ i < len 266 | return Ok((Idx(i32::try_from(lams)?), Expr::Val(&def[i]))) 267 | }, 268 | None => { 269 | // len - k < 0, and k ≤ i32::MAX 270 | // 0 < k - len, and 271 | // 0 < k - len ≤ i32::MAX - len ≤ i32::MAX 272 | // Cast is valid for sure since len < k ≤ i32::MAX. 273 | k -= len as i32; 274 | // 0 < k ≤ i32::MAX 275 | }, 276 | } 277 | }, 278 | // NOTE: n.0 ≥ 0 279 | Op::Lift(n) => if n.0 < k { 280 | // 0 < k ≤ i32::MAX and 0 < k - n.0, and 0 ≤ n.0 ≤ i32::MAX 281 | // Cast is valid because i32 to i64 always is. 282 | lams += n.0 as i64; 283 | // 0 < k - n.0 ≤ i32::MAX - n.0 ≤ i32::MAX 284 | k -= n.0; 285 | // 0 < k ≤ i32::MAX 286 | } else { 287 | // 0 < k ≤ i32::MAX 288 | return Ok((Idx(k), Expr::Var(None))) 289 | }, 290 | Op::Shift(n) => { 291 | // 0 ≤ n.0 ≤ i32::MAX 292 | // Cast is valid for sure since i32 to i64 always is. 293 | lams += n.0 as i64; 294 | }, 295 | } 296 | // Since we never add more than i32::MAX to lams in a loop iteration, and ops.len() ≤ u32::MAX, 297 | // lams can never exceed i32::MAX * u32::MAX = (2^31 - 1) * (2^32 - 1) < i64::MAX. 298 | } 299 | // lams ≤ i32::MAX * u32::MAX and k ≤ i32::MAX 300 | // lams + k ≤ i32::MAX * (u32::MAX + 1) = (2^31 - 1) * 2^32 < i64::MAX 301 | // Cast of k to i64 is valid since u32 to i64 always is. 302 | // if self.id.0 < k, then 0 < k - self.id.0 ≤ i32::MAX. 303 | Ok((Idx(i32::try_from(lams + k as i64)?), Expr::Var(if self.id < k { Some(Idx(k - self.id)) } else { None }))) 304 | } 305 | 306 | /* /// Composition of substitutions: [comp mk_clos s1 s2] computes a 307 | /// substitution equivalent to applying s2 then s1. Argument 308 | /// mk_clos is used when a closure has to be created, i.e. when 309 | /// s1 is applied on an element of s2. 
310 | /* pub fn comp(&mut self, mk_clos: (&mut T, &mut Subs) -> (), s: Subs) 311 | where I: BorrowMut<[T]> 312 | { 313 | 314 | } */ 315 | pub fn comp(mk_clos: (&mut T, &mut Subs) -> (), s1: Self, s2: Self) -> Self { 316 | let mut s; 317 | // 3 cases: 318 | // 1. s1 = CONS. 3 subcases: 319 | // i. s2 = CONS. Then CArray (new) 320 | // ii. s2 = SHIFT. Then reduce (recurse), SHIFT (new) or CONS (new) 321 | // iii. s2 = LIFT. Then always CONS (new) and sometimes LIFT (new) or CONS (new). 322 | // 2. s1 = LIFT. 3 subcases: 323 | // i. s2 = SHIFT. Then SHIFT (new, if k ≠ 0) and sometimes SHIFT (new) or LIFT (new). 324 | let mut is1 = s1.ops.iter().rev(); 325 | let mut is2 = s2.ops.iter().rev(); 326 | loop { 327 | if let Some(v2) = is2.next() { 328 | if let Some(v1) = is1.next() { 329 | match (*v1, *v2) { 330 | Op::Shift(k) => { 331 | // Shift after applying comp 332 | subs_shift(self.); 333 | } 334 | } 335 | } else { 336 | return Subs { ops: s2.ops, idx: s2.idx } 337 | } 338 | } else { 339 | return Subs { ops: s1.ops, idx: s1.idx }; 340 | } 341 | if let Some(v1) = s1. 342 | } 343 | } */ 344 | /*let rec comp mk_cl s1 s2 = 345 | match (s1, s2) with 346 | | _, ESID _ -> s1 347 | | ESID _, _ -> s2 348 | | SHIFT(k,s), _ -> subs_shft(k, comp mk_cl s s2) 349 | | _, CONS(x,s') -> 350 | CONS(CArray.Fun1.map (fun s t -> mk_cl(s,t)) s1 x, comp mk_cl s1 s') 351 | | CONS(x,s), SHIFT(k,s') -> 352 | let lg = Array.length x in 353 | if k == lg then comp mk_cl s s' 354 | else if k > lg then comp mk_cl s (SHIFT(k-lg, s')) 355 | else comp mk_cl (CONS(Array.sub x 0 (lg-k), s)) s' 356 | | CONS(x,s), LIFT(k,s') -> 357 | let lg = Array.length x in 358 | if k == lg then CONS(x, comp mk_cl s s') 359 | else if k > lg then CONS(x, comp mk_cl s (LIFT(k-lg, s'))) 360 | else 361 | CONS(Array.sub x (lg-k) k, 362 | comp mk_cl (CONS(Array.sub x 0 (lg-k),s)) s') 363 | | LIFT(k,s), SHIFT(k',s') -> 364 | if k 368 | if k 'a) -> 'a subs -> 'a subs -> 'a subs*/ 372 | 373 | /*let rec exp_rel lams k subs = 374 | match subs with 375 | | CONS (def,_) when k <= Array.length def 376 | -> Inl(lams,def.(Array.length def - k)) 377 | | CONS (v,l) -> exp_rel lams (k - Array.length v) l 378 | | LIFT (n,_) when k<=n -> Inr(lams+k,None) 379 | | LIFT (n,l) -> exp_rel (n+lams) (k-n) l 380 | | SHIFT (n,s) -> exp_rel (n+lams) k s 381 | | ESID n when k<=n -> Inr(lams+k,None) 382 | | ESID n -> Inr(lams+k,Some (k-n)) 383 | 384 | let expand_rel k subs = exp_rel 0 k subs*/ 385 | 386 | /// Tests whether a substitution is equal to the identity 387 | pub fn is_id(&self) -> bool 388 | where I: Borrow<[T]> 389 | { 390 | !self.ops.iter().any( |op| match *op { 391 | Op::Lift(_) => false, 392 | // NOTE: The below cannot happen with the current interface. 393 | // Op::Shift(Idx(0)) => false, 394 | // Op::Cons(ref x) => x.borrow().len() > 0, 395 | _ => true 396 | }) 397 | } 398 | } 399 | 400 | pub type SubsV = Subs>; 401 | 402 | /* (** {6 Compact representation } *) 403 | (** Compact representation of explicit relocations 404 | - [ELSHFT(l,n)] == lift of [n], then apply [lift l]. 405 | - [ELLFT(n,l)] == apply [l] to de Bruijn > [n] i.e under n binders. 
*) 406 | type lift = private 407 | | ELID 408 | | ELSHFT of lift * int 409 | | ELLFT of int * lift 410 | 411 | val el_id : lift 412 | val el_shft : int -> lift -> lift 413 | val el_liftn : int -> lift -> lift 414 | val el_lift : lift -> lift 415 | val reloc_rel : int -> lift -> int 416 | val is_lift_id : lift -> bool 417 | */ 418 | #[derive(Debug,Clone)] 419 | pub struct Lift { 420 | lifts: SVec, 421 | } 422 | 423 | impl Lift { 424 | pub fn id() -> Self { 425 | Lift { lifts: SVec::new(), } 426 | } 427 | 428 | fn push(&mut self, o: i32) -> IdxResult<()> { 429 | // TODO: Verify that u32 to usize is a safe upcast. 430 | if self.lifts.len() == u32::max_value() as usize { 431 | // lifts can never be larger than u32::MAX. 432 | return Err(IdxError(())); 433 | } 434 | self.lifts.push(o); 435 | Ok(()) 436 | } 437 | 438 | pub fn shift(&mut self, n: Idx) -> IdxResult<()> { 439 | if let Some(k) = self.lifts.last_mut() { 440 | // Coalesce shifts with shifts. 441 | *k = k.checked_add(n.0)?; 442 | return Ok(()) 443 | } 444 | return self.push(n.0) 445 | } 446 | 447 | pub fn liftn(&mut self, n: Idx) -> IdxResult<()> { 448 | match self.lifts.last_mut() { 449 | None => { 450 | // Lifts don't change ids 451 | return Ok(()) 452 | }, 453 | Some(&mut ref mut p) if *p < 0 => { 454 | // Coalesce lifts with lifts 455 | let p_ = p.checked_sub(n.0)?; 456 | return if p_ > i32::min_value() { 457 | // We need to make sure -p_ is still in range for 458 | // i32. 459 | *p = p_; 460 | Ok(()) 461 | } else { 462 | Err(IdxError(())) 463 | } 464 | }, 465 | _ => {}, 466 | } 467 | // n.0 is positive so making it negative is in bounds for sure. 468 | return self.push(-n.0) 469 | } 470 | 471 | pub fn lift(&mut self) -> IdxResult<()> { 472 | self.liftn(Idx(1)) 473 | } 474 | 475 | pub fn is_id(&self) -> bool { 476 | !self.lifts.iter().any( |i| *i > 0) 477 | } 478 | 479 | pub fn reloc_rel(&self, n: Idx) -> IdxResult { 480 | let mut n = n.0 as i64; 481 | let mut lams = 0 as i64; 482 | // INVARIANT: after x iterations through the loop, 0 ≤ lams, 0 < n, and 0 < lams + n ≤ (x + 1) * i32::MAX. 483 | // Basic idea: after every iteration of the loop, either only one of lams and n have been 484 | // increased by at most i32::MAX, or one has been increased by i32::MAX and the other 485 | // decreased by i32::MAX, such that neither goes negative. Therefore, the net sum of the 486 | // two is always ≤ (x + 1) * i32::MAX and neither is ever larger than x * i32::MAX. 487 | for k in self.lifts.iter().rev() { 488 | if *k < 0 { 489 | // Lift 490 | // Addition here safe because positive i64 plus negative i64 is always in bounds. 491 | let n_ = n + *k as i64; 492 | if n_ <= 0 { 493 | // 0 ≤ lams, 0 < n, and 0 < lams + n ≤ i32::MAX * (u32::MAX - 1 + 1) = (2^31 - 1) * (2^32 - 1) < i64::MAX 494 | return Ok(Idx(i32::try_from(lams + n)?)) 495 | } else { 496 | // 0 ≤ k ≤ i32::MAX, 0 ≤ lams, and 0 < n 497 | // 0 < n + k ≤ lams + n + k ≤ (x + 1) * i32::MAX + i32::MAX 498 | // 0 < n_ and 0 ≤ lams < lams - k and 0 < lams + n = lams - k + n + k ≤ (x + 1) * i32::MAX 499 | // 0 < lams - k + n_ ≤ (x + 1) * i32::MAX 500 | n = n_; 501 | // 0 < lams - k + n ≤ (x + 1) * i32::MAX, and 0 < lams - k < lams - k + n ≤ (x + 1) * i32::MAX < i64::MAX 502 | lams -= *k as i64; 503 | // 0 < lams + n ≤ (x + 1) * i32::MAX, and 0 < lams < lams + n ≤ (x + 1) * i32::MAX < i64::MAX 504 | } 505 | } else { 506 | // 0 ≤ k ≤ i32::MAX 507 | // Cast is valid for sure since i32 to i64 always is. 
508 | // 0 < n + k ≤ lams + n + k ≤ (x + 1) * i32::MAX + i32::MAX = (x + 2) * i32::MAX < i64::MAX 509 | n += *k as i64; 510 | // 0 < lams + n ≤ (x + 2) * i32::MAX, 0 < n, and 0 ≤ lams. 511 | } 512 | // Since we never add more than i32::MAX to lams in a loop iteration, and lifts.len() ≤ u32::MAX, 513 | // n + lams can never exceed i32::MAX * (u32::MAX + 1) = (2^31 - 1) * 2^32 < i64::MAX. 514 | } 515 | // lams ≤ i32::MAX * u32::MAX and n ≤ i32::MAX 516 | // lams + n ≤ i32::MAX * (u32::MAX + 1) = (2^31 - 1) * 2^32 < i64::MAX 517 | Ok(Idx(i32::try_from(lams + n)?)) 518 | } 519 | /* pub fn shift(&mut self, n: usize) -> Result<(), TryFromIntError> { 520 | 521 | } */ 522 | 523 | // let el_id = ELID 524 | // (* compose a relocation of magnitude n *) 525 | // let rec el_shft_rec n = function 526 | // | ELSHFT(el,k) -> el_shft_rec (k+n) el 527 | // | el -> ELSHFT(el,n) 528 | // let el_shft n el = if Int.equal n 0 then el else el_shft_rec n el 529 | // 530 | // (* cross n binders *) 531 | // let rec el_liftn_rec n = function 532 | // | ELID -> ELID 533 | // | ELLFT(k,el) -> el_liftn_rec (n+k) el 534 | // | el -> ELLFT(n, el) 535 | // let el_liftn n el = if Int.equal n 0 then el else el_liftn_rec n el 536 | // 537 | // let el_lift el = el_liftn_rec 1 el 538 | // 539 | // (* relocation of de Bruijn n in an explicit lift *) 540 | // let rec reloc_rel n = function 541 | // | ELID -> n 542 | // | ELLFT(k,el) -> 543 | // if n <= k then n else (reloc_rel (n-k) el) + k 544 | // | ELSHFT(el,k) -> (reloc_rel (n+k) el) 545 | // 546 | // let rec is_lift_id = function 547 | // | ELID -> true 548 | // | ELSHFT(e,n) -> Int.equal n 0 && is_lift_id e 549 | // | ELLFT (_,e) -> is_lift_id e 550 | // 551 | 552 | } 553 | -------------------------------------------------------------------------------- /src/coq/kernel/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod esubst; 2 | pub mod names; 3 | -------------------------------------------------------------------------------- /src/coq/kernel/names.rs: -------------------------------------------------------------------------------- 1 | use coq::lib::hashcons::{Table, HashconsedType, Hlist, Hstring}; 2 | use std::cmp::{Ordering}; 3 | use std::collections::HashMap; 4 | use std::hash::{Hash, Hasher}; 5 | use std::ops::Deref; 6 | use ocaml::de::{ 7 | Str, 8 | }; 9 | use ocaml::values::{ 10 | Cst, 11 | Dp, 12 | Ind, 13 | Kn, 14 | List, 15 | Mp, 16 | Proj, 17 | UId, 18 | }; 19 | 20 | pub type HDp = Table; 21 | 22 | /// KnKey - can be used as a hash key for KnMap 23 | pub struct KnKey<'a>(pub &'a Kn); 24 | 25 | /// UserOrd - use the user name of the Kn 26 | #[derive(Clone, Copy, Debug)] 27 | pub struct KnUser<'a>(pub &'a Cst); 28 | 29 | /// CanOrd - use the canonical name of the Kn 30 | #[derive(Clone, Copy, Debug)] 31 | pub struct KnCan<'a>(pub &'a Cst); 32 | 33 | pub type MutInd = Cst; 34 | 35 | pub type MpMap = HashMap; 36 | 37 | pub type KnMap<'b, T> = HashMap, T>; 38 | 39 | /// The [*_env] modules consider an order on user part of names; 40 | /// the others consider an order on canonical part of names 41 | 42 | /// Note: this should be MutInd.UserOrd 43 | pub type MindMapEnv<'b, T> = HashMap, T>; 44 | 45 | /// Note: this should be Constant.UserOrd 46 | pub type CMapEnv<'b, T> = HashMap, T>; 47 | 48 | impl<'a> Deref for KnKey<'a> { 49 | type Target = Kn; 50 | 51 | fn deref(&self) -> &Kn { 52 | self.0 53 | } 54 | } 55 | 56 | impl<'a> Deref for KnUser<'a> { 57 | type Target = Kn; 58 | 59 | fn deref(&self) -> &Kn { 60 | 
self.0.user() 61 | } 62 | } 63 | 64 | impl<'a> Deref for KnCan<'a> { 65 | type Target = Kn; 66 | 67 | fn deref(&self) -> &Kn { 68 | self.0.canonical() 69 | } 70 | } 71 | 72 | impl Dp { 73 | pub fn compare(&self, mut p2: &Self) -> Ordering { 74 | let mut p1 = self; 75 | loop { 76 | if p1 as *const _ == p2 as *const _ { return Ordering::Equal } 77 | match (self, p2) { 78 | (&List::Nil, &List::Nil) => return Ordering::Equal, 79 | (&List::Nil, _) => return Ordering::Less, 80 | (_, &List::Nil) => return Ordering::Greater, 81 | (&List::Cons(ref o1), &List::Cons(ref o2)) => { 82 | let (ref id1, ref p1_) = **o1; 83 | let (ref id2, ref p2_) = **o2; 84 | match id1.cmp(id2) { 85 | Ordering::Equal => { 86 | p1 = p1_; 87 | p2 = p2_; 88 | }, 89 | c => return c, 90 | } 91 | } 92 | } 93 | } 94 | } 95 | 96 | pub fn equal(&self, p2: &Self) -> bool { 97 | let mut p1 = self; 98 | let mut p2 = p2; 99 | loop { 100 | if p1 as *const _ == p2 as *const _ { return true } 101 | match (p1, p2) { 102 | (&List::Nil, &List::Nil) => return true, 103 | (&List::Cons(ref o1), &List::Cons(ref o2)) => { 104 | let (ref id1, ref p1_) = **o1; 105 | let (ref id2, ref p2_) = **o2; 106 | if id1 != id2 { return false } 107 | p1 = &*p1_; 108 | p2 = &*p2_; 109 | }, 110 | (_, _) => return false, 111 | } 112 | } 113 | } 114 | 115 | // NOTE: We don't need to implement the hash function separately because it's identical to the 116 | // one autogenerated for HList. 117 | pub fn hash(&self) -> i64 { 118 | fn(&'b Hstring, Str) -> Str>>>::hash(self) 119 | } 120 | 121 | pub fn hcons<'a>(self, u: &'a HDp) -> Dp { 122 | self.hashcons(&(u, (), Hstring::hcons)) 123 | } 124 | } 125 | 126 | impl UId { 127 | pub fn equal(&self, y: &Self) -> bool { 128 | self as *const _ == y as *const _ || 129 | { 130 | let UId(i1, ref id1, ref p1) = *self; 131 | let UId(i2, ref id2, ref p2) = *y; 132 | i1 == i2 && id1 == id2 && p1.equal(p2) 133 | } 134 | } 135 | } 136 | 137 | impl Mp { 138 | pub fn equal(&self, mp2: &Self) -> bool { 139 | let mut mp1 = self; 140 | let mut mp2 = mp2; 141 | loop { 142 | if mp1 as *const _ == mp2 as *const _ { return true } 143 | match (mp1, mp2) { 144 | (&Mp::File(ref p1), &Mp::File(ref p2)) => return p1.equal(p2), 145 | (&Mp::Bound(ref id1), &Mp::Bound(ref id2)) => return id1.equal(id2), 146 | (&Mp::Dot(ref mp1_, ref l1), &Mp::Dot(ref mp2_, ref l2)) => { 147 | if l1 != l2 { return false } 148 | mp1 = &**mp1_; 149 | mp2 = &**mp2_; 150 | }, 151 | (&Mp::File(_), _) | (&Mp::Bound(_), _) | (&Mp::Dot(_, _), _) => return false, 152 | } 153 | } 154 | } 155 | } 156 | 157 | impl Kn { 158 | pub fn equal(&self, kn2: &Self) -> bool { 159 | let h1 = self.refhash; 160 | let h2 = kn2.refhash; 161 | if 0 <= h1 && 0 <= h2 && h1 != h2 { false } 162 | else { 163 | self.label == kn2.label && 164 | self.dirpath.equal(&kn2.dirpath) && 165 | self.modpath.equal(&kn2.modpath) 166 | } 167 | } 168 | 169 | /* let compare (kn1 : kernel_name) (kn2 : kernel_name) = 170 | if kn1 == kn2 then 0 171 | else 172 | let c = String.compare kn1.knlabel kn2.knlabel in 173 | if not (Int.equal c 0) then c 174 | else 175 | let c = DirPath.compare kn1.dirpath kn2.dirpath in 176 | if not (Int.equal c 0) then c 177 | else ModPath.compare kn1.modpath kn2.modpath */ 178 | } 179 | 180 | impl<'a> PartialEq for KnKey<'a> { 181 | fn eq(&self, kn2: &Self) -> bool { 182 | if &**self as *const _ == &**kn2 as *const _ { true } 183 | else { 184 | let c = self.label == kn2.label; 185 | if !c { false } 186 | else { 187 | // NOTE: equality and comparison = 0 coincide here 188 | let c = 
self.dirpath.equal(&kn2.dirpath); 189 | if !c { false } 190 | // NOTE: equality and comparison = 0 coincide here 191 | else { self.modpath.equal(&kn2.modpath) } 192 | } 193 | } 194 | } 195 | } 196 | 197 | impl<'a> Eq for KnKey<'a> {} 198 | 199 | impl<'a> Hash for KnKey<'a> { 200 | fn hash(&self, state: &mut H) { 201 | // NOTE: This is a bit weird because we're hasing an already-hashed value. I'm not sure 202 | // how this affects collision probability, but unless we're using a dummy hasher (which 203 | // would require us to specify the hashing algorithm everywhere else, which is silly) this 204 | // might not be a good thing. 205 | // 206 | // FIXME: More importantly, the hash caching is fundamentally incompatible with how Rust's 207 | // regular hash maps go. For now, just don't cache this. 208 | /* let h = self.refhash; 209 | if h < 0 { 210 | let h = self.dirpath.hash(); */ 211 | Hash::hash(&self.modpath, state); 212 | Hash::hash(&self.dirpath, state); 213 | Hash::hash(&self.label, state); 214 | // /// Ensure positivity on all platforms 215 | // /// FIXME: Whoa, this is wildly unsafe, in general! 216 | // self.refhash.hash = h; 217 | /*} else { 218 | state.hash(h); 219 | let { modpath = mp; dirpath = dp; knlabel = lbl; } = kn in 220 | let h = combine3 (ModPath.hash mp) (DirPath.hash dp) (Label.hash lbl) in 221 | (* Ensure positivity on all platforms. *) 222 | let h = h land 0x3FFFFFFF in 223 | let () = kn.refhash <- h in 224 | 225 | } else { h } 226 | /* let h = kn.refhash in 227 | if h < 0 then 228 | let { modpath = mp; dirpath = dp; knlabel = lbl; } = kn in 229 | let h = combine3 (ModPath.hash mp) (DirPath.hash dp) (Label.hash lbl) in 230 | (* Ensure positivity on all platforms. *) 231 | let h = h land 0x3FFFFFFF in 232 | let () = kn.refhash <- h in 233 | h 234 | else h */ 235 | self.user().hash(state); */ 236 | } 237 | } 238 | 239 | impl Cst { 240 | pub fn canonical(&self) -> &Kn { 241 | match *self { 242 | Cst::Same(ref kn) => kn, 243 | Cst::Dual(ref o) => { 244 | let (_, ref kn) = **o; 245 | kn 246 | }, 247 | } 248 | } 249 | 250 | pub fn user(&self) -> &Kn { 251 | match *self { 252 | Cst::Same(ref kn) => kn, 253 | Cst::Dual(ref o) => { 254 | let (ref kn, _) = **o; 255 | kn 256 | }, 257 | } 258 | } 259 | } 260 | 261 | impl<'a> PartialEq for KnUser<'a> { 262 | fn eq(&self, y: &Self) -> bool { 263 | &**self as *const _ == &**y as *const _ || self.equal(&y) 264 | } 265 | } 266 | 267 | impl<'a> Eq for KnUser<'a> {} 268 | 269 | impl<'a> Hash for KnUser<'a> { 270 | fn hash(&self, state: &mut H) { 271 | KnKey(&**self).hash(state); 272 | } 273 | } 274 | 275 | impl<'a> PartialEq for KnCan<'a> { 276 | fn eq(&self, y: &Self) -> bool { 277 | &**self as *const _ == &**y as *const _ || self.equal(&y) 278 | } 279 | } 280 | 281 | impl<'a> Eq for KnCan<'a> {} 282 | 283 | impl<'a> Hash for KnCan<'a> { 284 | fn hash(&self, state: &mut H) { 285 | KnKey(&**self).hash(state); 286 | } 287 | } 288 | 289 | impl MutInd { 290 | pub fn eq_mind_chk(&self, y: &Self) -> bool { 291 | KnUser(self) == KnUser(y) 292 | } 293 | } 294 | 295 | impl Cst { 296 | pub fn eq_con_chk(&self, y: &Self) -> bool { 297 | KnUser(self) == KnUser(y) 298 | } 299 | } 300 | 301 | impl Ind { 302 | pub fn eq_ind_chk(&self, y: &Self) -> bool { 303 | self.pos == y.pos && self.name.eq_mind_chk(&y.name) 304 | } 305 | } 306 | 307 | impl Proj { 308 | pub fn equal(&self, y: &Self) -> bool { 309 | // NOTE: because it uses the default KerPair equality, which is Canonical. 
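        // (Concretely — a hypothetical case — two `Proj`s whose constants are
        // `Dual` pairs sharing the same canonical `Kn` but carrying different
        // user names still compare equal here, whereas `eq_con_chk` above, which
        // goes through `KnUser`, would tell them apart.)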
310 | KnCan(&self.0) == KnCan(&y.0) && self.1 == y.1 311 | } 312 | } 313 | -------------------------------------------------------------------------------- /src/coq/lib/c_array.rs: -------------------------------------------------------------------------------- 1 | use ocaml::de::{Array}; 2 | use std::borrow::{Borrow}; 3 | use std::sync::{Arc}; 4 | 5 | impl Array { 6 | /// If none of the elements is changed by f we return ar itself. 7 | /// The while loop looks for the first such an element. 8 | /// If found, we break here and the new array is produced, 9 | /// but f is not re-applied to elements that are already checked 10 | /// 11 | /// Different from OCaml in that it takes a PER (E) instead of always using pointer equality, 12 | /// since things in the array aren't necessarily boxed, and it doesn't require the map to 13 | /// produce element of exactly type T, only of type U (as long as you can get a borrowed T back 14 | /// out of U). This allows you to, e.g., have F go from &T to Cow, and then have 15 | /// equivalence as pointer equality so you can determine whether it's really the same 16 | /// reference. Note that you also must be able to go from a U to a T. 17 | /// 18 | /// Also differs from the OCaml implementation because it allows for an Err option for its map 19 | /// elements. If the thing you're mapping can't actually return an error, you can pass ! in 20 | /// its stead. 21 | /// ! in its stead. 22 | pub fn smart_map(&self, mut f: F, eq: Equiv) -> Result, E> 23 | where 24 | T: Clone, 25 | U: Borrow, 26 | F: FnMut(&T) -> Result, 27 | U: Into, 28 | Equiv: Fn(&T, &T) -> bool, 29 | { 30 | // Don't even bother allocating until we detect a difference. 31 | // NOTE: This might not be a useful microoptimization in Rust... 32 | // in fact, it might not be an optimization at all! 33 | for (i, v) in (&self).iter().enumerate() { 34 | let v_ = f(v)?; 35 | if !eq(v, v_.borrow()) { 36 | // The array had at least one new element, so we do have to allocate. 37 | let mut vec = Vec::with_capacity(self.len()); 38 | // The below is safe because i < self.len(). 39 | vec.extend_from_slice(&self[..i]); 40 | vec.push(v_.into()); 41 | // Observe that unlike the OCaml implementation, we don't repeat the check 42 | // for whether we can reuse v in these cases, because either way we have to 43 | // perform a Clone (since we don't actually own T in the first place). 44 | // The below is safe because i + 1 ≤ self.len() 45 | for v in &self[i + 1..] { 46 | vec.push(f(v)?.into()); 47 | } 48 | return Ok(Array(Arc::new(vec))); 49 | } 50 | } 51 | // There were no changes, so just clone this Arc. We could also use a Cow if we didn't 52 | // always have an Arc around anyway... 53 | return Ok(self.clone()); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/coq/lib/c_map.rs: -------------------------------------------------------------------------------- 1 | use ocaml::de::{ORef, Seed}; 2 | use ocaml::values::{ 3 | Int, 4 | CMap, 5 | Map, 6 | }; 7 | use serde; 8 | use std::collections::{HashMap}; 9 | use std::hash::{Hash}; 10 | 11 | // An iterator specialized to CMaps. 12 | pub struct CMapIter<'a, K, V> where K: 'a, V: 'a { 13 | stack: Vec<&'a ORef<(CMap, K, V, CMap, Int)>>, 14 | node: &'a CMap, 15 | } 16 | 17 | impl<'a, K, V> CMapIter<'a, K, V> { 18 | // TODO: Consider using height for size hint somehow (tree is balanced, so should be useful). 
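    // A usage sketch (hypothetical caller; the iterator is normally obtained via
    // `CMap::iter` further down): `for (k, v) in cmap.iter() { ... }` visits the
    // entries in key order, since `next` performs an inorder traversal.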
19 | fn new(node: &'a CMap) -> Self { 20 | CMapIter { 21 | stack: Vec::new(), 22 | node: node, 23 | } 24 | } 25 | } 26 | 27 | impl<'a, K, V> Iterator for CMapIter<'a, K, V> { 28 | type Item = (&'a K, &'a V); 29 | 30 | // Note: order preserving (inorder traversal), though this isn't actually useful to us right 31 | // now. 32 | // Also note: if there were a cycle (which there shouldn't be) in the original Map, 33 | // this could loop forever. But if used as intended (from a DeserializeSeed), this is unlikely 34 | // to happen, since DeserializeSeed will already loop forever in that case... 35 | fn next(&mut self) -> Option<(&'a K, &'a V)> { 36 | loop { 37 | match *self.node { 38 | CMap::Nil => { 39 | let node = if let Some(node) = self.stack.pop() { node } else { return None }; 40 | let (_, ref k, ref v, ref right, _) = **node; 41 | self.node = right; 42 | return Some((k, v)) 43 | }, 44 | CMap::Node(ref node) => { 45 | let (ref left, _, _, _, _) = **node; 46 | self.stack.push(node); 47 | self.node = left; 48 | }, 49 | } 50 | } 51 | } 52 | } 53 | 54 | impl CMap { 55 | pub fn iter<'a>(&'a self) -> CMapIter<'a, K, V> { 56 | CMapIter::new(self) 57 | } 58 | } 59 | 60 | impl<'de, K, V> serde::de::DeserializeState<'de, Seed<'de>> for Map 61 | where K: Hash + Eq + Clone + Send + Sync + 'static, 62 | V: Send + Sync + Clone + 'static, 63 | K: serde::de::DeserializeState<'de, Seed<'de>>, 64 | V: serde::de::DeserializeState<'de, Seed<'de>>, 65 | { 66 | fn deserialize_state<'seed, D>(seed: &'seed mut Seed<'de>, deserializer: D) -> Result 67 | where 68 | D: serde::de::Deserializer<'de>, 69 | { 70 | // Lazy: we just deserialize the CMap, then add everything to a HashMap. 71 | let cmap: CMap = CMap::deserialize_state(seed, deserializer)?; 72 | Ok(Map(cmap.iter().map( |(k, v)| (k.clone(), v.clone())).collect())) 73 | } 74 | } 75 | 76 | impl ::std::ops::Deref for Map where K: Hash + Eq { 77 | type Target = HashMap; 78 | fn deref(&self) -> &HashMap { 79 | &self.0 80 | } 81 | } 82 | 83 | impl ::std::ops::DerefMut for Map where K: Hash + Eq { 84 | fn deref_mut(&mut self) -> &mut HashMap { 85 | &mut self.0 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/coq/lib/hashcons.rs: -------------------------------------------------------------------------------- 1 | /// Generic hash-consing. 2 | 3 | use coq::lib::hashset::combine; 4 | use ocaml::de::{ 5 | ORef, 6 | Str, 7 | }; 8 | use ocaml::values::{ 9 | List, 10 | }; 11 | use std::sync::{Arc}; 12 | 13 | pub use self::make::{Table}; 14 | 15 | /// Hashconsing functorial interface 16 | 17 | /// Generic hashconsing signature 18 | /// 19 | /// Given an equivalence relation [eq], a hashconsing function is a 20 | /// function that associates the same canonical element to two elements 21 | /// related by [eq]. Usually, the element chosen is canonical w.r.t. 22 | /// physical equality [(==)], so as to reduce memory consumption and 23 | /// enhance efficiency of equality tests. 24 | /// 25 | /// In order to ensure canonicality, we need a way to remember the element 26 | /// associated to a class of equivalence; this is done using the table type 27 | /// generated by the [Make] functor. 28 | pub trait HashconsedType< 29 | /// Type of hashcons functions for the sub-structures contained in [t]. 30 | /// Usually a tuple of functions. 31 | U, 32 | > { 33 | /// The actual hashconsing function, using its fist argument to recursively 34 | /// hashcons substructures. 
--------------------------------------------------------------------------------
/src/coq/lib/hashcons.rs:
--------------------------------------------------------------------------------
1 | /// Generic hash-consing.
2 | 
3 | use coq::lib::hashset::combine;
4 | use ocaml::de::{
5 |     ORef,
6 |     Str,
7 | };
8 | use ocaml::values::{
9 |     List,
10 | };
11 | use std::sync::{Arc};
12 | 
13 | pub use self::make::{Table};
14 | 
15 | /// Hashconsing functorial interface
16 | 
17 | /// Generic hashconsing signature
18 | ///
19 | /// Given an equivalence relation [eq], a hashconsing function is a
20 | /// function that associates the same canonical element to two elements
21 | /// related by [eq]. Usually, the element chosen is canonical w.r.t.
22 | /// physical equality [(==)], so as to reduce memory consumption and
23 | /// enhance efficiency of equality tests.
24 | ///
25 | /// In order to ensure canonicality, we need a way to remember the element
26 | /// associated to a class of equivalence; this is done using the table type
27 | /// generated by the [Make] functor.
28 | pub trait HashconsedType<
29 |     /// Type of hashcons functions for the sub-structures contained in [t].
30 |     /// Usually a tuple of functions.
31 |     U,
32 | > {
33 |     /// The actual hashconsing function, using its first argument to recursively
34 |     /// hashcons substructures. It should be compatible with [eq], that is
35 |     /// [eq x (hashcons f x) = true].
36 |     fn hashcons<'a>(self, &'a U) -> Self
37 |         where Self: ToOwned;
38 | 
39 |     /// A comparison function. It is allowed to use physical equality
40 |     /// on the sub-terms hashconsed by the [hashcons] function, but it should be
41 |     /// insensible to shallow copy of the compared object.
42 |     fn eq(&self, &Self) -> bool;
43 | 
44 |     /// A hash function passed to the underlying hashtable structure. [hash]
45 |     /// should be compatible with [eq], i.e. if [eq x y = true] then
46 |     /// [hash x = hash y].
47 |     fn hash(&self) -> i64;
48 | }
49 | 
50 | mod make {
51 |     /// Create a new hashconsing, given canonicalization functions.
52 | 
53 |     use coq::lib::hashcons::HashconsedType;
54 |     use cuckoo::{CuckooHashMap};
55 |     use std::hash::{BuildHasherDefault, Hash, Hasher};
56 |     use std::marker::{PhantomData};
57 | 
58 |     /// Dummy hasher implementation, used internally.
59 |     ///
60 |     /// The reason this is a dummy is that none of the hashes used in Coq are parametric over the
61 |     /// hash function. Maybe they *should* be, but since they aren't, having custom state here is
62 |     /// not useful.
63 |     /// TODO: Investigate changing this (but keep in mind that if we choose a different hash
64 |     /// function from the one used by Coq, loading things from .vo files will likely get a lot more
65 |     /// complicated, since we won't be able to assume that their hashes are meaningful).
66 |     /// FIXME: Hey, wait, why can we assume their hashes are meaningful in the checker, anyway?
67 |     /// Couldn't they be any integer an attacker chose? This might be an exploitable soundness bug
68 |     /// (but hopefully the worst it can lead to is false negatives).
69 |     #[derive(Default)]
70 |     pub struct KeyHasher(i64);
71 | 
72 |     /// Wrapper around the Key for which we can generate our own Hash and Eq instances.
73 |     pub struct Key<T, U> {
74 |         key: T,
75 |         marker: PhantomData<U>,
76 |     }
77 | 
78 |     /// The concrete implementation of our concurrent hash table.
79 |     ///
80 |     /// FIXME: We should make something optimized for weak pointers (which would be amenable to
81 |     /// hash consing).
82 |     pub type Htbl<T, U> = CuckooHashMap<Key<T, U>, T, BuildHasherDefault<KeyHasher>>;
83 | 
84 |     /// Type of hashconsing tables
85 |     pub struct Table<T, U> {
86 |         pub tab: Htbl<T, U>,
87 |         pub u: U,
88 |     }
89 | 
90 |     impl<T, U> Default for Table<T, U>
91 |         where
92 |             T: HashconsedType<U>,
93 |             U: Default,
94 |     {
95 |         fn default() -> Self {
96 |             Table {
97 |                 tab: Htbl::default(),
98 |                 u: U::default(),
99 |             }
100 |         }
101 |     }
102 | 
103 |     impl Hasher for KeyHasher {
104 |         fn finish(&self) -> u64 {
105 |             // FIXME: Ensure this is not negative.
106 |             self.0 as u64
107 |         }
108 | 
109 |         fn write(&mut self, _bytes: &[u8]) { /* noop */ }
110 | 
111 |         /// We just use this to directly write our hash to the Hasher.
112 |         fn write_i64(&mut self, i: i64) {
113 |             self.0 = i;
114 |         }
115 |     }
116 | 
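    // NOTE (illustrative check, not part of the original file): KeyHasher::finish returns
    // exactly the i64 fed to write_i64, which is how the HashconsedType hash reaches the
    // table unchanged.
    #[cfg(test)]
    mod key_hasher_example {
        use super::KeyHasher;
        use std::hash::Hasher;

        #[test]
        fn passes_the_hash_through() {
            let mut h = KeyHasher::default();
            h.write_i64(42);
            assert_eq!(h.finish(), 42);
        }
    }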
117 |     impl<T, U> Key<T, U> {
118 |         pub fn new(key: T) -> Self {
119 |             Key {
120 |                 key: key,
121 |                 marker: PhantomData,
122 |             }
123 |         }
124 |     }
125 | 
126 |     impl<T, U> Hash for Key<T, U> where T: HashconsedType<U> {
127 |         fn hash<H>(&self, state: &mut H)
128 |             where H: Hasher,
129 |         {
130 |             // When used with the KeyHasher, this will ensure that the final hash is the same as
131 |             // this hash.
132 |             state.write_i64(self.key.hash());
133 |         }
134 |     }
135 | 
136 |     impl<T, U> PartialEq for Key<T, U> where T: HashconsedType<U> {
137 |         fn eq(&self, other: &Self) -> bool {
138 |             self.key.eq(&other.key)
139 |         }
140 |     }
141 | 
142 |     impl<T, U> Eq for Key<T, U> where T: HashconsedType<U> {}
143 | 
144 |     /// This creates a hashtable of the hashconsed objects.
145 |     impl<T, U> Table<T, U> where T: HashconsedType<U>,
146 |     {
147 |         pub fn generate(u: U) -> Self {
148 |             let tab = Htbl::with_capacity_and_hash_state(97, Default::default());
149 |             Table {
150 |                 tab: tab,
151 |                 u: u
152 |             }
153 |         }
154 | 
155 |         /// Perform the hashconsing of the given object within the table.
156 |         pub fn hcons<'a>(&'a self, x: T) -> T
157 |             where
158 |                 T: ToOwned,
159 |         {
160 |             let y = x.hashcons(&self.u);
161 |             // t.tab.repr(y.hash(), y)
162 |             y
163 |         }
164 |     }
165 | }
166 | 
167 | /// list
168 | pub type Hlist<'a, T, U, V, H> = (&'a Table<List<T>, U>, V, H);
169 | 
170 | impl<'a, T, U, V, H> HashconsedType<Hlist<'a, T, U, V, H>> for List<T>
171 |     where
172 |         T: Clone,
173 |         T: HashconsedType<V>,
174 |         H: for<'b> Fn(&'b U, T) -> T,
175 | {
176 |     fn hashcons<'b>(self, th: &'b Hlist<'a, T, U, V, H>) -> Self {
177 |         let y = match self {
178 |             List::Cons(o) => {
179 |                 let (ref x, ref l) = *o;
180 |                 let (ref tab, _, ref h) = *th;
181 |                 let x = h(&tab.u, x.clone());
182 |                 let l = (&*l).clone().hashcons(th);
183 |                 List::Cons(ORef(Arc::new((x, l))))
184 |             },
185 |             List::Nil => List::Nil
186 |         };
187 |         // t.tab.repr(y.hash(), y)
188 |         y
189 |     }
190 | 
191 |     fn eq(&self, l2: &Self) -> bool {
192 |         self as *const _ == l2 as *const _ ||
193 |         match (self, l2) {
194 |             (&List::Nil, &List::Nil) => true,
195 |             (&List::Cons(ref o1), &List::Cons(ref o2)) => {
196 |                 &**o1 as *const _ == &**o2 as *const _
197 |             },
198 |             _ => false,
199 |         }
200 |     }
201 | 
202 |     fn hash(&self) -> i64 {
203 |         // FIXME: Overflow.
204 |         self.iter().fold(0, |accu, x| combine::combine(x.hash(), accu))
205 |     }
206 | }
207 | 
208 | /// string
209 | pub type Hstring = Table<Str, ()>;
210 | 
211 | impl HashconsedType<()> for Str {
212 |     fn hashcons(self, _: &()) -> Self {
213 |         self
214 |     }
215 | 
216 |     fn eq(&self, s2: &Self) -> bool {
217 |         self == s2
218 |     }
219 | 
220 |     fn hash(&self) -> i64 {
221 |         // The cast to i64 is safe because casting u8 to i64 is always valid.
222 |         // FIXME: Overflow (wait, does overflow actually matter here?)
223 |         self.iter().fold(0, |accu, c| accu * 19 + *c as i64)
224 |     }
225 | }
226 | 
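// NOTE (illustrative sketch, not part of the original file): the intended shape of a
// HashconsedType instance for a leaf-like type, exercised through Table::generate/hcons.
// The Tag type is hypothetical, and since the table lookup in hcons is still commented
// out above, hashconsing a leaf is currently just the identity.
#[cfg(test)]
mod hashcons_example {
    use super::*;

    #[derive(Clone, Debug, PartialEq)]
    struct Tag(u8);

    impl HashconsedType<()> for Tag {
        fn hashcons<'a>(self, _: &'a ()) -> Self { self }
        fn eq(&self, other: &Self) -> bool { self == other }
        fn hash(&self) -> i64 { combine::combinesmall(1, self.0 as i64) }
    }

    #[test]
    fn hcons_is_currently_the_identity_for_leaves() {
        let table: Table<Tag, ()> = Table::generate(());
        assert_eq!(table.hcons(Tag(7)), Tag(7));
    }
}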
--------------------------------------------------------------------------------
/src/coq/lib/hashset.rs:
--------------------------------------------------------------------------------
1 | pub mod combine {
2 |     /// These are helper functions to combine the hash keys in a similar
3 |     /// way as [Hashtbl.hash] does. The constants [alpha] and [beta] must
4 |     /// be prime numbers. They were chosen empirically. Notice that the
5 |     /// problem of hashing trees is hard and there are plenty of studies on
6 |     /// this topic. Therefore, there must be room for improvement here.
7 |     const ALPHA : i64 = 65599;
8 |     const BETA : i64 = 7;
9 | 
10 |     pub const fn combine(x: i64, y: i64) -> i64 {
11 |         // FIXME: Handle overflow. Actually, does it even matter here?
12 |         x * ALPHA + y
13 |     }
14 | 
15 |     pub const fn combine3(x: i64, y: i64, z: i64) -> i64 {
16 |         combine(x, combine(y, z))
17 |     }
18 | 
19 |     pub const fn combine4(x: i64, y: i64, z: i64, t: i64) -> i64 {
20 |         combine(x, combine3(y, z, t))
21 |     }
22 | 
23 |     pub const fn combine5(x: i64, y: i64, z: i64, t: i64, u: i64) -> i64 {
24 |         combine(x, combine4(y, z, t, u))
25 |     }
26 | 
27 |     pub const fn combinesmall(x: i64, y: i64) -> i64 {
28 |         // FIXME: Handle overflow. Actually, does it even matter here?
29 |         BETA * x + y
30 |     }
31 | }
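// NOTE (illustrative worked example, not part of the original file): unfolding the
// definitions, combine is a base-65599 polynomial in the keys, and combinesmall folds a
// small tag in with the prime BETA = 7.
#[cfg(test)]
mod combine_example {
    use super::combine::{combine, combine3, combinesmall};

    #[test]
    fn unfolds_to_a_polynomial() {
        assert_eq!(combine(1, 2), 1 * 65599 + 2);
        assert_eq!(combine3(1, 2, 3), 1 * 65599 + (2 * 65599 + 3));
        assert_eq!(combinesmall(4, 5), 7 * 4 + 5);
    }
}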
--------------------------------------------------------------------------------
/src/coq/lib/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod c_array;
2 | pub mod c_map;
3 | pub mod hashcons;
4 | pub mod hashset;
5 | 
--------------------------------------------------------------------------------
/src/coq/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod checker;
2 | pub mod kernel;
3 | pub mod lib;
4 | 
--------------------------------------------------------------------------------
/src/hopcroft/hopcroft.rs:
--------------------------------------------------------------------------------
1 | use std::cmp::{Ord, Ordering};
2 | use std::vec::Vec;
3 | use hopcroft::partition::{Partition, Set};
4 | 
5 | pub enum StateT {}
6 | enum TransitionT {}
7 | 
8 | type State = usize;
9 | 
10 | pub struct Transition<L> {
11 |     lbl : L,
12 |     // Label of the transition
13 |     src : State,
14 |     // Source state
15 |     dst : State,
16 |     // Target state
17 | }
18 | 
19 | impl<L> Transition<L> {
20 | 
21 |     /// Create a new transition with the provided label, source and target
22 |     pub fn new(lbl : L, src : usize, dst : usize) -> Transition<L> {
23 |         Transition { lbl : lbl, src : src, dst : dst }
24 |     }
25 | 
26 | }
27 | 
28 | pub struct Automaton<L> {
29 |     /// Number of states
30 |     pub states : usize,
31 |     /// List of unique transitions between states
32 |     pub transitions : Box<[Transition<L>]>,
33 | }
34 | 
35 | struct Environment {
36 |     /// Current partition of the states
37 |     state_partition : Partition<StateT>,
38 |     /// Current partition of the transitions
39 |     transition_partition : Partition<TransitionT>,
40 |     /// Associate to each transition its source
41 |     transition_source : Box<[usize]>,
42 |     /// Associate to each state the list of transitions that end in it
43 |     state_pred_trans : Box<[Vec<usize>]>,
44 |     /// Partitions waiting to be processed
45 |     partition_todo : Vec<Set<TransitionT>>,
46 | }
47 | 
48 | /// Associate the list of transitions ending in a given state
49 | fn reverse<L>(automaton : &Automaton<L>) -> Box<[Vec<usize>]> {
50 |     let mut ans = Vec::with_capacity(automaton.states);
51 |     for _ in 0..automaton.states {
52 |         ans.push(Vec::new());
53 |     }
54 |     let mut ans = ans.into_boxed_slice();
55 |     let mut i : usize = 0;
56 |     for trans in automaton.transitions.into_iter() {
57 |         ans[trans.dst].push(i);
58 |         i = i + 1;
59 |     }
60 |     ans
61 | }
62 | 
63 | fn init<L : Ord>(automaton : &mut Automaton<L>) -> Environment {
64 |     // Sort transitions according to their label
65 |     automaton.transitions.sort_by(|x, y| { Ord::cmp(&(x.lbl), &(y.lbl)) });
66 |     // Initialize the environment
67 |     let len = automaton.transitions.len();
68 |     let st_part = Partition::new(automaton.states);
69 |     let mut sp_part = Partition::new(len);
70 |     let mut trans_src = Vec::with_capacity(len);
71 |     // Set the source of the transitions
72 |     for i in 0..len { trans_src.push(automaton.transitions[i].src) }
73 |     let trans_src = trans_src.into_boxed_slice();
74 |     // Split splitters according to their label
75 |     if len > 0 {
76 |         let mut label0 = &(automaton.transitions[0].lbl);
77 |         // pt is initial, full partition
78 |         let pt = sp_part.partition(0);
79 |         for i in 0..len {
80 |             // Each time the label changes, we split
81 |             let label = &(automaton.transitions[i].lbl);
82 |             let _ = match Ord::cmp(label, label0) {
83 |                 Ordering::Equal => {}
84 |                 _ => {
85 |                     let _ = sp_part.split(pt);
86 |                     label0 = label;
87 |                 }
88 |             };
89 |             sp_part.mark(i);
90 |         }
91 |         let _ = sp_part.split(pt);
92 |     }
93 |     // Push every splitter in the todo stack
94 |     let mut todo = Vec::with_capacity(sp_part.len());
95 |     for partition in sp_part.into_iter() {
96 |         todo.push(partition);
97 |     }
98 |     Environment {
99 |         state_partition : st_part,
100 |         transition_partition : sp_part,
101 |         transition_source : trans_src,
102 |         state_pred_trans : reverse(automaton),
103 |         partition_todo : todo,
104 |     }
105 | }
106 | 
107 | fn split_partition(s : Set<StateT>, env : &mut Environment, splitter_touched : &mut Vec<Set<TransitionT>>) {
108 |     assert!(splitter_touched.is_empty());
109 |     let r = match env.state_partition.split(s) {
110 |         None => { return; }
111 |         Some (r) => { r }
112 |     };
113 |     let r = if env.state_partition.size(r) < env.state_partition.size(s) { r } else { s };
114 |     for state in env.state_partition.class(r).into_iter() {
115 |         let ref preds = env.state_pred_trans[state];
116 |         for trans in preds {
117 |             let pt = env.transition_partition.partition(*trans);
118 |             if !env.transition_partition.is_marked(pt) {
119 |                 splitter_touched.push(pt);
120 |             };
121 |             env.transition_partition.mark(*trans);
122 |         }
123 |     }
124 |     for pt in splitter_touched.drain(..) {
125 |         match env.transition_partition.split(pt) {
126 |             None => (),
127 |             Some (npt) => { env.partition_todo.push(npt) },
128 |         }
129 |     }
130 | }
131 | 
132 | fn reduce_loop(env : &mut Environment, state_touched : &mut Vec<Set<StateT>>, splitter_touched : &mut Vec<Set<TransitionT>>) -> bool {
133 |     assert!(state_touched.is_empty());
134 |     assert!(splitter_touched.is_empty());
135 |     match env.partition_todo.pop() {
136 |         None => false,
137 |         Some (pt) => {
138 |             for trans in env.transition_partition.class(pt).into_iter() {
139 |                 let previous = env.transition_source[trans];
140 |                 let equiv = env.state_partition.partition(previous);
141 |                 if !env.state_partition.is_marked(equiv) {
142 |                     state_touched.push(equiv);
143 |                 }
144 |                 env.state_partition.mark(previous);
145 |             }
146 |             for state in state_touched.drain(..) {
147 |                 split_partition(state, env, splitter_touched);
148 |             }
149 |             true
150 |         }
151 |     }
152 | }
153 | 
154 | impl<L : Ord> Automaton<L> {
155 | 
156 |     /// Associate the equivalence classes of the states of an automaton
157 |     pub fn reduce(&mut self) -> Partition<StateT> {
158 |         let mut env = init(self);
159 |         let mut state_touched = Vec::new();
160 |         let mut splitter_touched = Vec::new();
161 |         while reduce_loop(&mut env, &mut state_touched, &mut splitter_touched) {};
162 |         env.state_partition
163 |     }
164 | 
165 | }
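// NOTE (illustrative sketch, not part of the original file): a tiny automaton whose two
// states have identical outgoing behaviour; Hopcroft's algorithm should merge them into a
// single equivalence class. The u8 labels and the assertions are only for demonstration.
#[cfg(test)]
mod reduce_example {
    use super::{Automaton, Transition};

    #[test]
    fn bisimilar_states_are_merged() {
        // Two states, each with a 0-labelled self loop: they are indistinguishable.
        let transitions = vec![Transition::new(0u8, 0, 0), Transition::new(0u8, 1, 1)];
        let mut automaton = Automaton { states : 2, transitions : transitions.into_boxed_slice() };
        let partition = automaton.reduce();
        assert_eq!(partition.partition(0).to_usize(), partition.partition(1).to_usize());
    }
}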
--------------------------------------------------------------------------------
/src/hopcroft/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod partition;
2 | pub mod hopcroft;
3 | 
--------------------------------------------------------------------------------
/src/hopcroft/partition.rs:
--------------------------------------------------------------------------------
1 | use std::vec::Vec;
2 | use std::iter::IntoIterator;
3 | use std::marker::PhantomData;
4 | 
5 | pub struct Set<T>(usize, PhantomData<*const T>);
6 | 
7 | impl<T> Copy for Set<T> {}
8 | impl<T> Clone for Set<T> { fn clone(&self) -> Set<T> { *self } }
9 | 
10 | struct Info {
11 |     /// index of the first element of a partition
12 |     first : usize,
13 |     /// successor index of the last element of a partition
14 |     last : usize,
15 |     /// index of the last marked element of a partition
16 |     marked : usize,
17 | }
18 | 
19 | pub struct Partition<T> {
20 |     /// data relative to each partition
21 |     partinfo : Vec<Info>,
22 |     /// associate a partition to an element
23 |     index : Box<[Set<T>]>,
24 |     /// contain elements in a contiguous way w.r.t. partitions
25 |     elements : Box<[usize]>,
26 |     /// keep the location of an element in [elements]
27 |     location : Box<[usize]>,
28 | }
29 | 
30 | impl<T> Set<T> {
31 | 
32 |     pub fn to_usize (self) -> usize { let Set(i, _) = self; i }
33 | 
34 |     pub fn of_usize (i : usize) -> Set<T> {
35 |         Set(i, PhantomData)
36 |     }
37 | 
38 | }
39 | 
40 | fn initial_size (n : usize) -> usize { n / 100 }
41 | 
42 | impl<T> Partition<T> {
43 | 
44 |     /// Create a new partition holding `n` elements. All elements are initially
45 |     /// members of the same partition.
46 |     pub fn new (n : usize) -> Partition<T> {
47 |         let mut partinfo = Vec::with_capacity(initial_size(n));
48 |         let mut index = Vec::with_capacity(n);
49 |         let mut elements = Vec::with_capacity(n);
50 |         let mut location = Vec::with_capacity(n);
51 |         partinfo.push(Info { first : 0, last : n, marked : 0 });
52 |         for i in 0..n {
53 |             index.push(Set::of_usize(0));
54 |             elements.push(i);
55 |             location.push(i);
56 |         }
57 |         Partition {
58 |             partinfo : partinfo,
59 |             index : index.into_boxed_slice(),
60 |             elements : elements.into_boxed_slice(),
61 |             location : location.into_boxed_slice(),
62 |         }
63 |     }
64 | 
65 |     /// Number of partitions held by the datastructure.
66 |     pub fn len (&self) -> usize { Vec::len(&self.partinfo) }
67 | 
68 |     /// Number of elements in a partition.
69 |     pub fn size (&self, i : Set<T>) -> usize {
70 |         let Set(i, _) = i;
71 |         let ref info = self.partinfo[i];
72 |         info.last - info.first
73 |     }
74 | 
75 |     /// Return the partition an element is in.
76 |     pub fn partition(&self, n : usize) -> Set<T> { self.index[n].clone() }
77 | 
78 | }
79 | 
80 | pub struct PartitionIter<T> {
81 |     off : usize,
82 |     max : usize,
83 |     phantom : PhantomData<*const T>,
84 | }
85 | 
86 | impl<T> Iterator for PartitionIter<T> {
87 |     type Item = Set<T>;
88 |     fn next (&mut self) -> Option<Set<T>> {
89 |         if self.max == self.off { None } else {
90 |             let ans = self.off;
91 |             self.off = self.off + 1;
92 |             Some(Set::of_usize(ans))
93 |         }
94 |     }
95 | }
96 | 
97 | impl<'a, T> IntoIterator for &'a Partition<T> {
98 |     type Item = Set<T>;
99 |     type IntoIter = PartitionIter<T>;
100 |     fn into_iter(self) -> PartitionIter<T> {
101 |         PartitionIter { off : 0, max : self.len(), phantom : PhantomData }
102 |     }
103 | }
104 | 
105 | pub struct SetIter<'a, T> {
106 |     off : usize,
107 |     max : usize,
108 |     ptr : &'a[usize],
109 |     phantom : PhantomData<*const T>,
110 | }
111 | 
112 | impl <'a, T> Iterator for SetIter<'a, T> {
113 |     type Item = usize;
114 |     fn next (&mut self) -> Option<usize> {
115 |         if self.max == self.off { None } else {
116 |             let ans = self.ptr[self.off];
117 |             self.off = self.off + 1;
118 |             Some(ans)
119 |         }
120 |     }
121 | }
122 | 
123 | impl <'a, T> Partition<T> {
124 | 
125 |     pub fn class(&'a self, i : Set<T>) -> SetIter<'a, T> {
126 |         let Set(i, _) = i;
127 |         let ref info = self.partinfo[i];
128 |         SetIter {
129 |             off : info.first,
130 |             max : info.last,
131 |             ptr : self.elements.as_ref(),
132 |             phantom : PhantomData,
133 |         }
134 |     }
135 | 
136 | }
137 | 
138 | impl<T> Partition<T> {
139 | 
140 |     /// Split a partition between marked and unmarked elements. If this creates a
141 |     /// new partition, it is returned. Otherwise it returns `None`.
142 |     pub fn split (&mut self, i : Set<T>) -> Option<Set<T>> {
143 |         let Set(i, _) = i;
144 |         let new = {
145 |             let info = &mut self.partinfo[i];
146 |             if info.marked == info.last { info.marked = info.first; return None; }
147 |             if info.marked == info.first { return None; }
148 |             let ninfo = Info {
149 |                 first : info.first,
150 |                 last : info.marked,
151 |                 marked : info.first,
152 |             };
153 |             info.first = info.marked;
154 |             ninfo
155 |         };
156 |         let len = self.partinfo.len();
157 |         for i in new.first..new.last {
158 |             self.index[self.elements[i]] = Set::of_usize(len);
159 |         }
160 |         self.partinfo.push(new);
161 |         Some (Set::of_usize(len))
162 |     }
163 | 
164 |     pub fn mark(&mut self, i : usize) {
165 |         let Set(set, _) = self.index[i];
166 |         let loc = self.location[i];
167 |         let mark = self.partinfo[set].marked;
168 |         if mark <= loc {
169 |             self.elements[loc] = self.elements[mark];
170 |             self.location[self.elements[loc]] = loc;
171 |             self.elements[mark] = i;
172 |             self.location[i] = mark;
173 |             self.partinfo[set].marked = mark + 1;
174 |         }
175 |     }
176 | 
177 |     pub fn is_marked(&self, i : Set<T>) -> bool {
178 |         let Set(i, _) = i;
179 |         let ref info = self.partinfo[i];
180 |         info.marked != info.first
181 |     }
182 | 
183 |     pub fn choose(&self, i : Set<T>) -> usize {
184 |         let Set(i, _) = i;
185 |         self.elements[self.partinfo[i].first]
186 |     }
187 | 
188 | }
189 | 
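// NOTE (illustrative sketch, not part of the original file): marking a strict, non-empty
// subset of a class and then splitting moves the marked elements into a fresh class.
// The element type and the concrete indices are only for demonstration.
#[cfg(test)]
mod partition_example {
    use super::Partition;

    enum Elt {}

    #[test]
    fn mark_then_split() {
        let mut p: Partition<Elt> = Partition::new(4);
        assert_eq!(p.len(), 1);
        p.mark(1);
        p.mark(3);
        let class0 = p.partition(0);
        let class1 = p.split(class0).expect("a strict subset was marked");
        // 1 and 3 end up in the new class; 0 and 2 stay in the old one.
        assert_eq!(p.len(), 2);
        assert_eq!(p.partition(1).to_usize(), class1.to_usize());
        assert_eq!(p.partition(3).to_usize(), class1.to_usize());
        assert_eq!(p.partition(0).to_usize(), class0.to_usize());
        assert_eq!(p.partition(2).to_usize(), class0.to_usize());
    }
}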
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
1 | // #![feature(placement_in_syntax)]
2 | #![feature(const_fn)]
3 | #![feature(rc_downcast)]
4 | #![feature(try_from)]
5 | #![feature(try_trait)]
6 | #![feature(nonzero)]
7 | #![feature(never_type)]
8 | #![feature(drain_filter)]
9 | #![feature(generic_param_attrs)]
10 | extern crate fixedbitset;
11 | #[macro_use] extern crate serde_state as serde;
12 | #[macro_use] extern crate serde_derive_state;
13 | 
14 | extern crate core;
15 | extern crate cuckoo;
16 | extern crate lazy_init;
17 | extern crate movecell;
18 | extern crate rayon;
19 | extern crate smallvec;
20 | extern crate take_mut;
21 | extern crate typed_arena;
22 | extern crate vec_map;
23 | 
24 | #[macro_use]
25 | extern crate bitflags;
26 | 
27 | pub mod ocaml;
28 | pub mod coq;
29 | pub mod hopcroft;
30 | pub mod util;
31 | 
--------------------------------------------------------------------------------
/src/ocaml/compact.rs:
--------------------------------------------------------------------------------
1 | use hopcroft::hopcroft::{Automaton, Transition, StateT};
2 | use hopcroft::partition::{Partition};
3 | use ocaml::marshal::{ObjRepr, Obj, Field, Memory};
4 | 
5 | #[derive (Clone, PartialEq, PartialOrd, Eq, Ord)]
6 | enum Label {
7 |     Tag (u8), // self tag
8 |     Int (i64), // integer
9 |     Atm (usize, u8), // index, tag
10 |     Fld (usize), // pointer to another block
11 |     Str (Box<[u8]>), // string
12 | }
13 | 
14 | fn push(trs : &mut Vec<Transition<Label>>, lbl : Label, src : usize, dst : usize) {
15 |     let t = Transition::new(lbl, src, dst);
16 |     trs.push(t)
17 | }
18 | 
19 | fn to_automaton(obj : &ObjRepr) -> Automaton